131
|
1 /* Manipulation of formal and actual parameters of functions and function
|
|
2 calls.
|
145
|
3 Copyright (C) 2017-2020 Free Software Foundation, Inc.
|
131
|
4
|
|
5 This file is part of GCC.
|
|
6
|
|
7 GCC is free software; you can redistribute it and/or modify it under
|
|
8 the terms of the GNU General Public License as published by the Free
|
|
9 Software Foundation; either version 3, or (at your option) any later
|
|
10 version.
|
|
11
|
|
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
15 for more details.
|
|
16
|
|
17 You should have received a copy of the GNU General Public License
|
|
18 along with GCC; see the file COPYING3. If not see
|
|
19 <http://www.gnu.org/licenses/>. */
|
|
20
|
|
21 #include "config.h"
|
|
22 #include "system.h"
|
|
23 #include "coretypes.h"
|
|
24 #include "backend.h"
|
|
25 #include "tree.h"
|
|
26 #include "gimple.h"
|
|
27 #include "ssa.h"
|
|
28 #include "cgraph.h"
|
|
29 #include "fold-const.h"
|
145
|
30 #include "tree-eh.h"
|
131
|
31 #include "stor-layout.h"
|
|
32 #include "gimplify.h"
|
|
33 #include "gimple-iterator.h"
|
|
34 #include "gimplify-me.h"
|
145
|
35 #include "tree-cfg.h"
|
131
|
36 #include "tree-dfa.h"
|
|
37 #include "ipa-param-manipulation.h"
|
|
38 #include "print-tree.h"
|
|
39 #include "gimple-pretty-print.h"
|
|
40 #include "builtins.h"
|
145
|
41 #include "tree-ssa.h"
|
|
42 #include "tree-inline.h"
|
131
|
43
|
145
|
44
|
|
/* Actual prefixes of different newly synthesized parameters.  Keep in sync
   with IPA_PARAM_PREFIX_* defines.  The string at index IPA_PARAM_PREFIX_FOO
   is used when building the DECL_NAME of a parameter created with that
   prefix index.  */

static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
  = {"SYNTH",
     "ISRA",
     "simd",
     "mask"};
|
131
|
53
|
145
|
54 /* Names of parameters for dumping. Keep in sync with enum ipa_parm_op. */
|
|
55
|
|
56 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
|
|
57 = {"IPA_PARAM_OP_UNDEFINED",
|
|
58 "IPA_PARAM_OP_COPY",
|
|
59 "IPA_PARAM_OP_NEW",
|
|
60 "IPA_PARAM_OP_SPLIT"};
|
|
61
|
|
62 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
|
|
63 FNDECL. The function should not be called during LTO WPA phase except for
|
|
64 thunks (or functions with bodies streamed in). */
|
|
65
|
|
66 void
|
|
67 push_function_arg_decls (vec<tree> *args, tree fndecl)
|
131
|
68 {
|
|
69 int count;
|
|
70 tree parm;
|
|
71
|
145
|
72 /* Safety check that we do not attempt to use the function in WPA, except
|
|
73 when the function is a thunk and then we have DECL_ARGUMENTS or when we
|
|
74 have already explicitely loaded its body. */
|
|
75 gcc_assert (!flag_wpa
|
|
76 || DECL_ARGUMENTS (fndecl)
|
|
77 || gimple_has_body_p (fndecl));
|
131
|
78 count = 0;
|
|
79 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
|
|
80 count++;
|
|
81
|
145
|
82 args->reserve_exact (count);
|
131
|
83 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
|
145
|
84 args->quick_push (parm);
|
131
|
85 }
|
|
86
|
145
|
87 /* Fill an empty vector TYPES with trees representing formal parameters of
|
131
|
88 function type FNTYPE. */
|
|
89
|
145
|
90 void
|
|
91 push_function_arg_types (vec<tree> *types, tree fntype)
|
131
|
92 {
|
|
93 int count = 0;
|
|
94 tree t;
|
|
95
|
|
96 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
|
|
97 count++;
|
|
98
|
145
|
99 types->reserve_exact (count);
|
131
|
100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
|
145
|
101 types->quick_push (TREE_VALUE (t));
|
131
|
102 }
|
|
103
|
145
|
/* Dump the adjustments in the vector ADJ_PARAMS to file F in a human
   friendly way.  */

void
ipa_dump_adjusted_parameters (FILE *f,
			      vec<ipa_adjusted_param, va_gc> *adj_params)
{
  unsigned i, len = vec_safe_length (adj_params);
  bool first = true;

  fprintf (f, "    IPA adjusted parameters: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_adjusted_param *apm;
      apm = &(*adj_params)[i];

      /* Indent continuation lines so entries line up under the header.  */
      if (!first)
	fprintf (f, "                             ");
      else
	first = false;

      fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op],
	       apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
      switch (apm->op)
	{
	case IPA_PARAM_OP_UNDEFINED:
	  break;

	case IPA_PARAM_OP_COPY:
	  fprintf (f, ", base_index: %u", apm->base_index);
	  fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
	  break;

	case IPA_PARAM_OP_SPLIT:
	  fprintf (f, ", offset: %u", apm->unit_offset);
	  /* fall-through */
	case IPA_PARAM_OP_NEW:
	  /* NEW and SPLIT share everything except the unit offset printed
	     above.  */
	  fprintf (f, ", base_index: %u", apm->base_index);
	  fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
	  print_node_brief (f, ", type: ", apm->type, 0);
	  print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
	  fprintf (f, " prefix: %s",
		   ipa_param_prefixes[apm->param_prefix_index]);
	  if (apm->reverse)
	    fprintf (f, ", reverse-sso");
	  break;
	}
      fprintf (f, "\n");
    }
}
|
131
|
154
|
145
|
/* Fill NEW_TYPES with types of a function after its current OTYPES have been
   modified as described in ADJ_PARAMS.  When USE_PREV_INDICES is true, look
   copied parameters up in OTYPES by prev_clone_index rather than by
   base_index.  */

static void
fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
				vec<ipa_adjusted_param, va_gc> *adj_params,
				bool use_prev_indices)
{
  unsigned adj_len = vec_safe_length (adj_params);
  new_types->reserve_exact (adj_len);
  for (unsigned i = 0; i < adj_len ; i++)
    {
      ipa_adjusted_param *apm = &(*adj_params)[i];
      if (apm->op == IPA_PARAM_OP_COPY)
	{
	  unsigned index
	    = use_prev_indices ? apm->prev_clone_index : apm->base_index;
	  /* The following needs to be handled gracefully because of type
	     mismatches.  This happens with LTO but apparently also in Fortran
	     with -fcoarray=lib -O2 -lcaf_single -latomic.  */
	  if (index >= otypes->length ())
	    continue;
	  new_types->quick_push ((*otypes)[index]);
	}
      else if (apm->op == IPA_PARAM_OP_NEW
	       || apm->op == IPA_PARAM_OP_SPLIT)
	{
	  tree ntype = apm->type;
	  /* For non-BLKmode register types, normalize the type's alignment
	     to the natural alignment of its mode.  */
	  if (is_gimple_reg_type (ntype)
	      && TYPE_MODE (ntype) != BLKmode)
	    {
	      unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
	      if (TYPE_ALIGN (ntype) != malign)
		ntype = build_aligned_type (ntype, malign);
	    }
	  new_types->quick_push (ntype);
	}
      else
	gcc_unreachable ();
    }
}
|
131
|
198
|
145
|
/* Build and return a function type just like ORIG_TYPE but with parameter
   types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
   ORIG_TYPE itself has NULL TREE_ARG_TYPEs.  If METHOD2FUNC is true, also make
   it a FUNCTION_TYPE instead of a METHOD_TYPE.  If SKIP_RETURN is true, the
   new type returns void instead of ORIG_TYPE's return type.  */

static tree
build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
			      bool method2func, bool skip_return)
{
  tree new_arg_types = NULL;
  if (TYPE_ARG_TYPES (orig_type))
    {
      gcc_checking_assert (new_param_types);
      /* Remember whether the original prototype was terminated by a
	 void_type_node sentinel (i.e. was not varargs).  */
      bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
			     == void_type_node);
      unsigned len = new_param_types->length ();
      /* Cons the list up front-to-back, which produces it in reverse.  */
      for (unsigned i = 0; i < len; i++)
	new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
				   new_arg_types);

      tree new_reversed = nreverse (new_arg_types);
      /* After nreverse, NEW_ARG_TYPES points at what is now the LAST node of
	 NEW_REVERSED, so chaining void_list_node onto it re-creates the
	 sentinel termination of the original prototype.  */
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
      new_arg_types = new_reversed;
    }

  /* Use build_distinct_type_copy to preserve as much as possible from original
     type (debug info, attribute lists etc.).  The one exception is
     METHOD_TYPEs which must have THIS argument and when we are asked to remove
     it, we need to build new FUNCTION_TYPE instead.  */
  tree new_type = NULL;
  if (method2func)
    {
      tree ret_type;
      if (skip_return)
	ret_type = void_type_node;
      else
	ret_type = TREE_TYPE (orig_type);

      new_type
	= build_distinct_type_copy (build_function_type (ret_type,
							 new_arg_types));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }
  else
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_arg_types;
      if (skip_return)
	TREE_TYPE (new_type) = void_type_node;
    }

  return new_type;
}
|
|
258
|
|
259 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
|
|
260 is none. */
|
|
261
|
|
262 int
|
|
263 ipa_param_adjustments::get_max_base_index ()
|
|
264 {
|
|
265 unsigned adj_len = vec_safe_length (m_adj_params);
|
|
266 int max_index = -1;
|
|
267 for (unsigned i = 0; i < adj_len ; i++)
|
131
|
268 {
|
145
|
269 ipa_adjusted_param *apm = &(*m_adj_params)[i];
|
|
270 if (apm->op == IPA_PARAM_OP_COPY
|
|
271 && max_index < apm->base_index)
|
|
272 max_index = apm->base_index;
|
131
|
273 }
|
145
|
274 return max_index;
|
131
|
275 }
|
|
276
|
145
|
277
|
|
278 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
|
|
279 parameter that originally was at that position still survives in the given
|
|
280 clone or is removed/replaced. If the final array is smaller than an index
|
|
281 of an original parameter, that parameter also did not survive. That a
|
|
282 parameter survives does not mean it has the same index as before. */
|
|
283
|
|
284 void
|
|
285 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
|
|
286 {
|
|
287 unsigned adj_len = vec_safe_length (m_adj_params);
|
|
288 int max_index = get_max_base_index ();
|
|
289
|
|
290 if (max_index < 0)
|
|
291 return;
|
|
292 surviving_params->reserve_exact (max_index + 1);
|
|
293 surviving_params->quick_grow_cleared (max_index + 1);
|
|
294 for (unsigned i = 0; i < adj_len ; i++)
|
|
295 {
|
|
296 ipa_adjusted_param *apm = &(*m_adj_params)[i];
|
|
297 if (apm->op == IPA_PARAM_OP_COPY)
|
|
298 (*surviving_params)[apm->base_index] = true;
|
|
299 }
|
|
300 }
|
|
301
|
|
302 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
|
|
303 those which do not survive. Any parameter outside of lenght of the vector
|
|
304 does not survive. There is currently no support for a parameter to be
|
|
305 copied to two distinct new parameters. */
|
131
|
306
|
|
307 void
|
145
|
308 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
|
|
309 {
|
|
310 unsigned adj_len = vec_safe_length (m_adj_params);
|
|
311 int max_index = get_max_base_index ();
|
|
312
|
|
313 if (max_index < 0)
|
|
314 return;
|
|
315 unsigned res_len = max_index + 1;
|
|
316 new_indices->reserve_exact (res_len);
|
|
317 for (unsigned i = 0; i < res_len ; i++)
|
|
318 new_indices->quick_push (-1);
|
|
319 for (unsigned i = 0; i < adj_len ; i++)
|
|
320 {
|
|
321 ipa_adjusted_param *apm = &(*m_adj_params)[i];
|
|
322 if (apm->op == IPA_PARAM_OP_COPY)
|
|
323 (*new_indices)[apm->base_index] = i;
|
|
324 }
|
|
325 }
|
|
326
|
|
327 /* Return the original index for the given new parameter index. Return a
|
|
328 negative number if not available. */
|
|
329
|
|
330 int
|
|
331 ipa_param_adjustments::get_original_index (int newidx)
|
|
332 {
|
|
333 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
|
|
334 if (adj->op != IPA_PARAM_OP_COPY)
|
|
335 return -1;
|
|
336 return adj->base_index;
|
|
337 }
|
|
338
|
|
339 /* Return true if the first parameter (assuming there was one) survives the
|
|
340 transformation intact and remains the first one. */
|
|
341
|
|
342 bool
|
|
343 ipa_param_adjustments::first_param_intact_p ()
|
|
344 {
|
|
345 return (!vec_safe_is_empty (m_adj_params)
|
|
346 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
|
|
347 && (*m_adj_params)[0].base_index == 0);
|
|
348 }
|
|
349
|
|
350 /* Return true if we have to change what has formerly been a method into a
|
|
351 function. */
|
|
352
|
|
353 bool
|
|
354 ipa_param_adjustments::method2func_p (tree orig_type)
|
|
355 {
|
|
356 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
|
|
357 }
|
|
358
|
|
/* Given function type OLD_TYPE, return a new type derived from it after
   performing all stored modifications.  TYPE_ORIGINAL_P should be true when
   OLD_TYPE refers to the type before any IPA transformations, as opposed to a
   type that can be an intermediate one in between various IPA
   transformations.  */

tree
ipa_param_adjustments::build_new_function_type (tree old_type,
						bool type_original_p)
{
  auto_vec<tree,16> new_param_types, *new_param_types_p;
  if (prototype_p (old_type))
    {
      auto_vec<tree, 16> otypes;
      push_function_arg_types (&otypes, old_type);
      /* For non-original (intermediate) types, parameters are located by
	 prev_clone_index rather than base_index.  */
      fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params,
				      !type_original_p);
      new_param_types_p = &new_param_types;
    }
  else
    /* An unprototyped type has no argument list to adjust.  */
    new_param_types_p = NULL;

  return build_adjusted_function_type (old_type, new_param_types_p,
				       method2func_p (old_type), m_skip_return);
}
|
|
384
|
|
/* Build variant of function decl ORIG_DECL which has no return value if
   M_SKIP_RETURN is true and, if ORIG_DECL's types or parameters is known, has
   this type adjusted as indicated in M_ADJ_PARAMS.  Arguments from
   DECL_ARGUMENTS list are not processed now, since they are linked by
   TREE_CHAIN directly and not accessible in LTO during WPA.  The caller is
   responsible for eliminating them when clones are properly materialized.  */

tree
ipa_param_adjustments::adjust_decl (tree orig_decl)
{
  tree new_decl = copy_node (orig_decl);
  tree orig_type = TREE_TYPE (orig_decl);
  /* Only rebuild the type when there is a prototype to adjust or a
     non-void return value to strip.  */
  if (prototype_p (orig_type)
      || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
    {
      tree new_type = build_new_function_type (orig_type, false);
      TREE_TYPE (new_decl) = new_type;
    }
  if (method2func_p (orig_type))
    /* A former method no longer has a vtable slot.  */
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When signature changes, we need to clear builtin info.  */
  if (fndecl_built_in_p (new_decl))
    set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0);

  DECL_VIRTUAL_P (new_decl) = 0;
  DECL_LANG_SPECIFIC (new_decl) = NULL;

  return new_decl;
}
|
|
415
|
|
/* Wrapper around get_ref_base_and_extent_hwi for cases interesting for
   IPA-SRA transformations.  Return true if EXPR has an interesting form and
   fill in *BASE_P with the base and *UNIT_OFFSET_P with the offset from it in
   bytes (units).  */

static bool
isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
{
  HOST_WIDE_INT offset, size;
  bool reverse;
  tree base
    = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
  if (!base || size < 0)
    return false;

  /* Only byte-aligned accesses are interesting.  */
  if ((offset % BITS_PER_UNIT) != 0)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant MEM_REF offset into OFFSET and strip the MEM_REF;
	 give up when the offset is not a compile-time constant.  */
      poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
      HOST_WIDE_INT moff;
      bool is_cst = plmoff.is_constant (&moff);
      if (!is_cst)
	return false;
      offset += moff * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  /* The unit offset must be representable as a non-negative unsigned.  */
  if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
    return false;

  *base_p = base;
  *unit_offset_p = offset / BITS_PER_UNIT;
  return true;
}
|
|
451
|
|
452 /* Return true if EXPR describes a transitive split (i.e. one that happened for
|
|
453 both the caller and the callee) as recorded in PERFORMED_SPLITS. In that
|
|
454 case, store index of the respective record in PERFORMED_SPLITS into
|
|
455 *SM_IDX_P and the unit offset from all handled components in EXPR into
|
|
456 *UNIT_OFFSET_P. */
|
131
|
457
|
145
|
458 static bool
|
|
459 transitive_split_p (vec<ipa_param_performed_split, va_gc> *performed_splits,
|
|
460 tree expr, unsigned *sm_idx_p, unsigned *unit_offset_p)
|
|
461 {
|
|
462 tree base;
|
|
463 if (!isra_get_ref_base_and_offset (expr, &base, unit_offset_p))
|
|
464 return false;
|
|
465
|
|
466 if (TREE_CODE (base) == SSA_NAME)
|
|
467 {
|
|
468 base = SSA_NAME_VAR (base);
|
|
469 if (!base)
|
|
470 return false;
|
|
471 }
|
|
472
|
|
473 unsigned len = vec_safe_length (performed_splits);
|
|
474 for (unsigned i = 0 ; i < len; i++)
|
|
475 {
|
|
476 ipa_param_performed_split *sm = &(*performed_splits)[i];
|
|
477 if (sm->dummy_decl == base)
|
|
478 {
|
|
479 *sm_idx_p = i;
|
|
480 return true;
|
|
481 }
|
|
482 }
|
|
483 return false;
|
|
484 }
|
|
485
|
|
/* Structure to hold declarations representing transitive IPA-SRA splits.  In
   essence, if we need to pass UNIT_OFFSET of a parameter which originally has
   number BASE_INDEX, we should pass down REPL.  */

struct transitive_split_map
{
  /* Replacement to pass in place of the split-out piece.  */
  tree repl;
  /* Index of the original parameter the piece comes from.  */
  unsigned base_index;
  /* Offset of the piece within the original parameter, in bytes (units).  */
  unsigned unit_offset;
};
|
|
496
|
|
/* If call STMT contains any parameters representing transitive splits as
   described by PERFORMED_SPLITS, return the number of extra parameters that
   were added during clone materialization and fill in INDEX_MAP with adjusted
   indices of corresponding original parameters and TRANS_MAP with description
   of all transitive replacement descriptions.  Otherwise return zero.  */

static unsigned
init_transitive_splits (vec<ipa_param_performed_split, va_gc> *performed_splits,
			gcall *stmt, vec <unsigned> *index_map,
			auto_vec <transitive_split_map> *trans_map)
{
  unsigned phony_arguments = 0;
  /* STMT_IDX walks actual call arguments, BASE_INDEX counts original
     parameters; the two diverge once phony (split-out) arguments appear.  */
  unsigned stmt_idx = 0, base_index = 0;
  unsigned nargs = gimple_call_num_args (stmt);
  while (stmt_idx < nargs)
    {
      unsigned unit_offset_delta;
      tree base_arg = gimple_call_arg (stmt, stmt_idx);

      if (phony_arguments > 0)
	index_map->safe_push (stmt_idx);

      unsigned sm_idx;
      stmt_idx++;
      if (transitive_split_p (performed_splits, base_arg, &sm_idx,
			      &unit_offset_delta))
	{
	  if (phony_arguments == 0)
	    /* We have optimistically avoided constructing index_map so far but
	       now it is clear it will be necessary, so let's create the easy
	       bit we skipped until now.  */
	    for (unsigned k = 0; k < stmt_idx; k++)
	      index_map->safe_push (k);

	  /* Records for pieces of the same original parameter share one dummy
	     decl and sit consecutively in PERFORMED_SPLITS, so consume them
	     as one group.  */
	  tree dummy = (*performed_splits)[sm_idx].dummy_decl;
	  for (unsigned j = sm_idx; j < performed_splits->length (); j++)
	    {
	      ipa_param_performed_split *caller_split
		= &(*performed_splits)[j];
	      if (caller_split->dummy_decl != dummy)
		break;

	      tree arg = gimple_call_arg (stmt, stmt_idx);
	      struct transitive_split_map tsm;
	      tsm.repl = arg;
	      tsm.base_index = base_index;
	      /* Only record pieces at or past the offset of EXPR itself.  */
	      if (caller_split->unit_offset >= unit_offset_delta)
		{
		  tsm.unit_offset
		    = (caller_split->unit_offset - unit_offset_delta);
		  trans_map->safe_push (tsm);
		}

	      phony_arguments++;
	      stmt_idx++;
	    }
	}
      base_index++;
    }
  return phony_arguments;
}
|
|
558
|
|
/* Modify actual arguments of a function call in statement STMT, assuming it
   calls CALLEE_DECL.  PERFORMED_SPLITS describes parameter splits already
   materialized in the caller (or is NULL if there are none) and is used to
   handle transitive splits.  If UPDATE_REFERENCES is true, also update the
   cgraph references of the enclosing node.  Return the new statement that
   replaced the old one.  When invoked, cfun and current_function_decl have to
   be set to the caller.  */

gcall *
ipa_param_adjustments::modify_call (gcall *stmt,
				    vec<ipa_param_performed_split,
				        va_gc> *performed_splits,
				    tree callee_decl, bool update_references)
{
  unsigned len = vec_safe_length (m_adj_params);
  auto_vec<tree, 16> vargs (len);
  tree old_decl = gimple_call_fndecl (stmt);
  unsigned old_nargs = gimple_call_num_args (stmt);
  /* KEPT[i] records whether original argument i is passed on unchanged,
     which is later used to emit debug binds for the dropped ones.  */
  auto_vec<bool, 16> kept (old_nargs);
  kept.quick_grow_cleared (old_nargs);

  auto_vec <unsigned, 16> index_map;
  auto_vec <transitive_split_map> trans_map;
  bool transitive_remapping = false;

  if (performed_splits)
    {
      unsigned removed = init_transitive_splits (performed_splits,
						 stmt, &index_map, &trans_map);
      if (removed > 0)
	{
	  transitive_remapping = true;
	  /* Phony arguments do not count as original ones.  */
	  old_nargs -= removed;
	}
    }

  cgraph_node *current_node = cgraph_node::get (current_function_decl);
  if (update_references)
    current_node->remove_stmt_references (stmt);

  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  /* Remember the statement before STMT so that, after replacement, we can
     walk back over everything inserted here and re-record references.  */
  gimple_stmt_iterator prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (unsigned i = 0; i < len; i++)
    {
      ipa_adjusted_param *apm = &(*m_adj_params)[i];
      if (apm->op == IPA_PARAM_OP_COPY)
	{
	  unsigned index = apm->base_index;
	  if (index >= old_nargs)
	    /* Can happen if the original call has argument mismatch,
	       ignore.  */
	    continue;
	  if (transitive_remapping)
	    index = index_map[apm->base_index];

	  tree arg = gimple_call_arg (stmt, index);

	  vargs.quick_push (arg);
	  kept[index] = true;
	  continue;
	}

      /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
	 If we ever want to support it during WPA IPA stage, we'll need a
	 mechanism to call into the IPA passes that introduced them.  Currently
	 we simply mandate that IPA infrastructure understands all argument
	 modifications.  Remember, edge redirection/modification is done only
	 once, not in steps for each pass modifying the callee like clone
	 materialization.  */
      gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);

      /* We have to handle transitive changes differently using the maps we
	 have created before.  So look into them first.  */
      tree repl = NULL_TREE;
      for (unsigned j = 0; j < trans_map.length (); j++)
	if (trans_map[j].base_index == apm->base_index
	    && trans_map[j].unit_offset == apm->unit_offset)
	  {
	    repl = trans_map[j].repl;
	    break;
	  }
      if (repl)
	{
	  vargs.quick_push (repl);
	  continue;
	}

      unsigned index = apm->base_index;
      if (index >= old_nargs)
	/* Can happen if the original call has argument mismatch, ignore.  */
	continue;
      if (transitive_remapping)
	index = index_map[apm->base_index];
      tree base = gimple_call_arg (stmt, index);

      /* We create a new parameter out of the value of the old one, we can
	 do the following kind of transformations:

	 - A scalar passed by reference, potentially as a part of a larger
	 aggregate, is converted to a scalar passed by value.

	 - A part of an aggregate is passed instead of the whole aggregate.  */

      location_t loc = gimple_location (stmt);
      tree off;
      bool deref_base = false;
      unsigned int deref_align = 0;
      if (TREE_CODE (base) != ADDR_EXPR
	  && is_gimple_reg_type (TREE_TYPE (base)))
	{
	  /* Detect type mismatches in calls in invalid programs and make a
	     poor attempt to gracefully convert them so that we don't ICE.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (base)))
	    base = force_value_to_type (ptr_type_node, base);

	  off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
	}
      else
	{
	  bool addrof;
	  if (TREE_CODE (base) == ADDR_EXPR)
	    {
	      base = TREE_OPERAND (base, 0);
	      addrof = true;
	    }
	  else
	    addrof = false;

	  tree prev_base = base;
	  poly_int64 base_offset;
	  base = get_addr_base_and_unit_offset (base, &base_offset);

	  /* Aggregate arguments can have non-invariant addresses.  */
	  if (!base)
	    {
	      base = build_fold_addr_expr (prev_base);
	      off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
	    }
	  else if (TREE_CODE (base) == MEM_REF)
	    {
	      /* A MEM_REF base that was not reached through ADDR_EXPR is a
		 genuine dereference; remember its alignment.  */
	      if (!addrof)
		{
		  deref_base = true;
		  deref_align = TYPE_ALIGN (TREE_TYPE (base));
		}
	      off = build_int_cst (apm->alias_ptr_type,
				   base_offset + apm->unit_offset);
	      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
				     off);
	      base = TREE_OPERAND (base, 0);
	    }
	  else
	    {
	      off = build_int_cst (apm->alias_ptr_type,
				   base_offset + apm->unit_offset);
	      base = build_fold_addr_expr (base);
	    }
	}

      /* Compute the alignment of the piece being loaded and weaken the
	 type's alignment accordingly if the access may be misaligned.  */
      tree type = apm->type;
      unsigned int align;
      unsigned HOST_WIDE_INT misalign;

      if (deref_base)
	{
	  align = deref_align;
	  misalign = 0;
	}
      else
	{
	  get_pointer_alignment_1 (base, &align, &misalign);
	  /* All users must make sure that we can be optimistic when it
	     comes to alignment in this case (by inspecting the final users
	     of these new parameters).  */
	  if (TYPE_ALIGN (type) > align)
	    align = TYPE_ALIGN (type);
	}
      misalign
	+= (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr ()
	    * BITS_PER_UNIT);
      misalign = misalign & (align - 1);
      if (misalign != 0)
	align = least_bit_hwi (misalign);
      if (align < TYPE_ALIGN (type))
	type = build_aligned_type (type, align);
      base = force_gimple_operand_gsi (&gsi, base,
				       true, NULL, true, GSI_SAME_STMT);
      tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
      REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
      /* If expr is not a valid gimple call argument emit
	 a load into a temporary.  */
      if (is_gimple_reg_type (TREE_TYPE (expr)))
	{
	  gimple *tem = gimple_build_assign (NULL_TREE, expr);
	  if (gimple_in_ssa_p (cfun))
	    {
	      gimple_set_vuse (tem, gimple_vuse (stmt));
	      expr = make_ssa_name (TREE_TYPE (expr), tem);
	    }
	  else
	    expr = create_tmp_reg (TREE_TYPE (expr));
	  gimple_assign_set_lhs (tem, expr);
	  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
	}
      vargs.quick_push (expr);
    }

  /* Append the tail of arguments that are always passed through.  */
  if (m_always_copy_start >= 0)
    for (unsigned i = m_always_copy_start; i < old_nargs; i++)
      vargs.safe_push (gimple_call_arg (stmt, i));

  /* For optimized away parameters, add on the caller side
     before the call
     DEBUG D#X => parm_Y(D)
     stmts and associate D#X with parm in decl_debug_args_lookup
     vector to say for debug info that if parameter parm had been passed,
     it would have value parm_Y(D).  */
  if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
    {
      vec<tree, va_gc> **debug_args = NULL;
      unsigned i = 0;
      for (tree old_parm = DECL_ARGUMENTS (old_decl);
	   old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
	   old_parm = DECL_CHAIN (old_parm), i++)
	{
	  if (!is_gimple_reg (old_parm) || kept[i])
	    continue;
	  tree origin = DECL_ORIGIN (old_parm);
	  tree arg = gimple_call_arg (stmt, i);

	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      /* If ARG is itself the result of a cast from the right type,
		 use the uncast value instead of converting back.  */
	      tree rhs1;
	      if (TREE_CODE (arg) == SSA_NAME
		  && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
		  && (rhs1
		      = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
		  && useless_type_conversion_p (TREE_TYPE (origin),
						TREE_TYPE (rhs1)))
		arg = rhs1;
	      else
		arg = fold_convert_loc (gimple_location (stmt),
					TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* Reuse an existing DEBUG_EXPR_DECL for ORIGIN if one was already
	     recorded (entries are (origin, ddecl) pairs).  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE;
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      SET_DECL_MODE (ddecl, DECL_MODE (origin));

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  gimple *def_temp = gimple_build_debug_bind (ddecl,
						      unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
    }

  gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);

  if (tree lhs = gimple_call_lhs (stmt))
    {
      if (!m_skip_return)
	gimple_call_set_lhs (new_stmt, lhs);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  /* LHS should now be a default-def SSA.  Unfortunately default-def
	     SSA_NAMEs need a backing variable (or at least some code examining
	     SSAs assumes it is non-NULL).  So we either have to re-use the
	     decl we have at hand or introduce a new one.  */
	  tree repl = create_tmp_var (TREE_TYPE (lhs), "removed_return");
	  repl = get_or_create_ssa_default_def (cfun, repl);
	  SSA_NAME_IS_DEFAULT_DEF (repl) = true;
	  imm_use_iterator ui;
	  use_operand_p use_p;
	  gimple *using_stmt;
	  FOR_EACH_IMM_USE_STMT (using_stmt, ui, lhs)
	    {
	      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		{
		  SET_USE (use_p, repl);
		}
	      update_stmt (using_stmt);
	    }
	}
    }

  /* Carry over the location, block, static chain and flags of the old
     call to its replacement.  */
  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    gimple_move_vops (new_stmt, stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  /* Re-record references for every statement we inserted, walking backwards
     from the replacement up to (but not including) the statement that
     preceded the original call.  */
  if (update_references)
    do
      {
	current_node->record_stmt_references (gsi_stmt (gsi));
	gsi_prev (&gsi);
      }
    while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
  return new_stmt;
}
|
|
889
|
|
890 /* Dump information contained in the object in textual form to F. */
|
|
891
|
|
892 void
|
|
893 ipa_param_adjustments::dump (FILE *f)
|
|
894 {
|
|
895 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start);
|
|
896 ipa_dump_adjusted_parameters (f, m_adj_params);
|
|
897 if (m_skip_return)
|
|
898 fprintf (f, " Will SKIP return.\n");
|
|
899 }
|
|
900
|
|
901 /* Dump information contained in the object in textual form to stderr. */
|
|
902
|
|
void
ipa_param_adjustments::debug ()
{
  /* Reuse the dump machinery, directing output to stderr so this is
     convenient to call from within a debugger.  */
  dump (stderr);
}
|
|
908
|
|
909 /* Register that REPLACEMENT should replace parameter described in APM and
|
|
910 optionally as DUMMY to mark transitive splits across calls. */
|
|
911
|
|
912 void
|
|
913 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
|
|
914 tree replacement,
|
|
915 tree dummy)
|
|
916 {
|
|
917 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
|
|
918 || apm->op == IPA_PARAM_OP_NEW);
|
|
919 gcc_checking_assert (!apm->prev_clone_adjustment);
|
|
920 ipa_param_body_replacement psr;
|
|
921 psr.base = m_oparms[apm->prev_clone_index];
|
|
922 psr.repl = replacement;
|
|
923 psr.dummy = dummy;
|
|
924 psr.unit_offset = apm->unit_offset;
|
|
925 m_replacements.safe_push (psr);
|
|
926 }
|
|
927
|
|
928 /* Copy or not, as appropriate given m_id and decl context, a pre-existing
|
|
929 PARM_DECL T so that it can be included in the parameters of the modified
|
|
930 function. */
|
|
931
|
|
932 tree
|
|
933 ipa_param_body_adjustments::carry_over_param (tree t)
|
|
934 {
|
|
935 tree new_parm;
|
|
936 if (m_id)
|
131
|
937 {
|
145
|
938 new_parm = remap_decl (t, m_id);
|
|
939 if (TREE_CODE (new_parm) != PARM_DECL)
|
|
940 new_parm = m_id->copy_decl (t, m_id);
|
131
|
941 }
|
145
|
942 else if (DECL_CONTEXT (t) != m_fndecl)
|
|
943 {
|
|
944 new_parm = copy_node (t);
|
|
945 DECL_CONTEXT (new_parm) = m_fndecl;
|
|
946 }
|
|
947 else
|
|
948 new_parm = t;
|
|
949 return new_parm;
|
131
|
950 }
|
|
951
|
145
|
952 /* Common initialization performed by all ipa_param_body_adjustments
|
|
953 constructors. OLD_FNDECL is the declaration we take original arguments
|
|
954 from, (it may be the same as M_FNDECL). VARS, if non-NULL, is a pointer to
|
|
955 a chained list of new local variables. TREE_MAP is the IPA-CP produced
|
|
956 mapping of trees to constants.
|
|
957
|
|
958 The function is rather long but it really only initializes all data members
|
|
959 of the class. It creates new param DECLs, finds their new types, */
|
131
|
960
|
145
|
void
ipa_param_body_adjustments::common_initialization (tree old_fndecl,
						   tree *vars,
						   vec<ipa_replace_map *,
						     va_gc> *tree_map)
{
  /* Gather the original parameter decls and their types.  When the function
     type carries no TYPE_ARG_TYPES, reconstruct the type vector from the
     decls themselves.  */
  push_function_arg_decls (&m_oparms, old_fndecl);
  auto_vec<tree,16> otypes;
  if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
    push_function_arg_types (&otypes, TREE_TYPE (old_fndecl));
  else
    {
      auto_vec<tree,16> oparms;
      push_function_arg_decls (&oparms, old_fndecl);
      unsigned ocount = oparms.length ();
      otypes.reserve_exact (ocount);
      for (unsigned i = 0; i < ocount; i++)
	otypes.quick_push (TREE_TYPE (oparms[i]));
    }
  fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true);

  /* KEPT[i] will record that original parameter I survives in some form;
     ISRA_DUMMY_DECLS[i] caches the per-parameter dummy used to mark
     transitive splits.  */
  auto_vec<bool, 16> kept;
  kept.reserve_exact (m_oparms.length ());
  kept.quick_grow_cleared (m_oparms.length ());
  auto_vec<tree, 16> isra_dummy_decls;
  isra_dummy_decls.reserve_exact (m_oparms.length ());
  isra_dummy_decls.quick_grow_cleared (m_oparms.length ());

  unsigned adj_len = vec_safe_length (m_adj_params);
  /* A METHOD_TYPE is turned into an ordinary FUNCTION_TYPE whenever its
     first (this) parameter is not plainly copied into slot zero.  */
  m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
		   && (adj_len == 0
		       || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
		       || (*m_adj_params)[0].base_index != 0));

  /* The main job of this function is to go over the vector of adjusted
     parameters and create declarations or find corresponding old ones and push
     them to m_new_decls.  For IPA-SRA replacements it also creates
     corresponding m_id->dst_node->clone.performed_splits entries.  */

  m_new_decls.reserve_exact (adj_len);
  for (unsigned i = 0; i < adj_len ; i++)
    {
      ipa_adjusted_param *apm = &(*m_adj_params)[i];
      unsigned prev_index = apm->prev_clone_index;
      tree new_parm;
      if (apm->op == IPA_PARAM_OP_COPY
	  || apm->prev_clone_adjustment)
	{
	  kept[prev_index] = true;
	  new_parm = carry_over_param (m_oparms[prev_index]);
	  m_new_decls.quick_push (new_parm);
	}
      else if (apm->op == IPA_PARAM_OP_NEW
	       || apm->op == IPA_PARAM_OP_SPLIT)
	{
	  tree new_type = m_new_types[i];
	  gcc_checking_assert (new_type);
	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 new_type);
	  const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = new_type;
	  DECL_CONTEXT (new_parm) = m_fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  /* We assume all newly created arguments are not addressable.  */
	  if (TREE_CODE (new_type) == COMPLEX_TYPE
	      || TREE_CODE (new_type) == VECTOR_TYPE)
	    DECL_GIMPLE_REG_P (new_parm) = 1;
	  layout_decl (new_parm, 0);
	  m_new_decls.quick_push (new_parm);

	  if (apm->op == IPA_PARAM_OP_SPLIT)
	    {
	      m_split_modifications_p = true;

	      if (m_id)
		{
		  /* Lazily create (at most one per original parameter) a
		     dummy VAR_DECL used by call redirection to recognize
		     transitive splits.  */
		  tree dummy_decl;
		  if (!isra_dummy_decls[prev_index])
		    {
		      dummy_decl = copy_decl_to_var (m_oparms[prev_index],
						     m_id);
		      /* Any attempt to remap this dummy in this particular
			 instance of clone materialization should yield
			 itself.  */
		      insert_decl_map (m_id, dummy_decl, dummy_decl);

		      DECL_CHAIN (dummy_decl) = *vars;
		      *vars = dummy_decl;
		      isra_dummy_decls[prev_index] = dummy_decl;
		    }
		  else
		    dummy_decl = isra_dummy_decls[prev_index];

		  register_replacement (apm, new_parm, dummy_decl);
		  ipa_param_performed_split ps;
		  ps.dummy_decl = dummy_decl;
		  ps.unit_offset = apm->unit_offset;
		  vec_safe_push (m_id->dst_node->clone.performed_splits, ps);
		}
	      else
		register_replacement (apm, new_parm);
	    }
	}
      else
	gcc_unreachable ();
    }

  /* As part of body modifications, we will also have to replace remaining
     uses of removed PARM_DECLs (which do not however use the initial value)
     with their VAR_DECL copies.

     We do this differently with and without m_id.  With m_id, we rely on its
     mapping and create a replacement straight away.  Without it, we have our
     own mechanism for which we have to populate m_removed_decls vector.  Just
     don't mix them, that is why you should not call
     replace_removed_params_ssa_names or perform_cfun_body_modifications when
     you construct with ID not equal to NULL.  */

  unsigned op_len = m_oparms.length ();
  for (unsigned i = 0; i < op_len; i++)
    if (!kept[i])
      {
	if (m_id)
	  {
	    if (!m_id->decl_map->get (m_oparms[i]))
	      {
		/* TODO: Perhaps at least aggregate-type params could re-use
		   their isra_dummy_decl here?  */
		tree var = copy_decl_to_var (m_oparms[i], m_id);
		insert_decl_map (m_id, m_oparms[i], var);
		/* Declare this new variable.  */
		DECL_CHAIN (var) = *vars;
		*vars = var;
	      }
	  }
	else
	  {
	    m_removed_decls.safe_push (m_oparms[i]);
	    m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1);
	  }
      }

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* Finally, when generating debug info, we fill vector m_reset_debug_decls
     with removed parameters declarations.  We do this in order to re-map their
     debug bind statements and create debug decls for them.  */

  if (tree_map)
    {
      /* Do not output debuginfo for parameter declarations as if they vanished
	 when they were in fact replaced by a constant.  */
      auto_vec <int, 16> index_mapping;
      bool need_remap = false;

      if (m_id && m_id->src_node->clone.param_adjustments)
	{
	  /* Parameter indices in TREE_MAP refer to the source node's
	     numbering; translate them through the previous adjustments.  */
	  ipa_param_adjustments *prev_adjustments
	    = m_id->src_node->clone.param_adjustments;
	  prev_adjustments->get_updated_indices (&index_mapping);
	  need_remap = true;
	}

      for (unsigned i = 0; i < tree_map->length (); i++)
	{
	  int parm_num = (*tree_map)[i]->parm_num;
	  gcc_assert (parm_num >= 0);
	  if (need_remap)
	    parm_num = index_mapping[parm_num];
	  kept[parm_num] = true;
	}
    }

  for (unsigned i = 0; i < op_len; i++)
    if (!kept[i] && is_gimple_reg (m_oparms[i]))
      m_reset_debug_decls.safe_push (m_oparms[i]);
}
|
|
1143
|
|
1144 /* Constructor of ipa_param_body_adjustments from a simple list of
|
|
1145 modifications to parameters listed in ADJ_PARAMS which will prepare ground
|
|
1146 for modification of parameters of fndecl. Return value of the function will
|
|
1147 not be removed and the object will assume it does not run as a part of
|
|
1148 tree-function_versioning. */
|
|
1149
|
|
ipa_param_body_adjustments
::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
			      tree fndecl)
  : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
    m_split_modifications_p (false), m_fndecl (fndecl), m_id (NULL),
    m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
    m_removed_decls (), m_removed_map (), m_method2func (false)
{
  /* No copy_body_data, no extra variable list, no IPA-CP constant map:
     this variant is not run as part of tree_function_versioning.  */
  common_initialization (fndecl, NULL, NULL);
}
|
|
1160
|
|
1161 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
|
|
1162 ADJUSTMENTS which will prepare ground for modification of parameters of
|
|
1163 fndecl. The object will assume it does not run as a part of
|
|
1164 tree-function_versioning. */
|
131
|
1165
|
145
|
ipa_param_body_adjustments
::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
			      tree fndecl)
  : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
    m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
    m_id (NULL), m_oparms (), m_new_decls (), m_new_types (),
    m_replacements (), m_removed_decls (), m_removed_map (),
    m_method2func (false)
{
  /* Like the adj_params variant but keeps a back-pointer to the full
     ipa_param_adjustments object; still not part of
     tree_function_versioning, hence no id/vars/tree_map.  */
  common_initialization (fndecl, NULL, NULL);
}
|
|
1177
|
|
1178 /* Constructor of ipa_param_body_adjustments which sets it up as a part of
|
|
1179 running tree_function_versioning. Planned modifications to the function are
|
|
1180 in ADJUSTMENTS. FNDECL designates the new function clone which is being
|
|
1181 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which
|
|
1182 at the time of invocation still share DECL_ARGUMENTS). ID is the
|
|
1183 copy_body_data structure driving the whole body copying process. VARS is a
|
|
1184 pointer to the head of the list of new local variables, TREE_MAP is the map
|
|
1185 that drives tree substitution in the cloning process. */
|
|
1186
|
|
ipa_param_body_adjustments
::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
			      tree fndecl, tree old_fndecl,
			      copy_body_data *id, tree *vars,
			      vec<ipa_replace_map *, va_gc> *tree_map)
  : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
    m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
    m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
    m_removed_decls (), m_removed_map (), m_method2func (false)
{
  /* Full tree_function_versioning variant: original parameters come from
     OLD_FNDECL (which may still share DECL_ARGUMENTS with FNDECL), newly
     created variables are chained onto *VARS and TREE_MAP carries the
     IPA-CP constant substitutions.  */
  common_initialization (old_fndecl, vars, tree_map);
}
|
|
1199
|
|
1200 /* Chain new param decls up and return them. */
|
|
1201
|
|
1202 tree
|
|
1203 ipa_param_body_adjustments::get_new_param_chain ()
|
|
1204 {
|
|
1205 tree result;
|
|
1206 tree *link = &result;
|
131
|
1207
|
145
|
1208 unsigned len = vec_safe_length (m_adj_params);
|
|
1209 for (unsigned i = 0; i < len; i++)
|
|
1210 {
|
|
1211 tree new_decl = m_new_decls[i];
|
|
1212 *link = new_decl;
|
|
1213 link = &DECL_CHAIN (new_decl);
|
|
1214 }
|
|
1215 *link = NULL_TREE;
|
|
1216 return result;
|
|
1217 }
|
|
1218
|
|
1219 /* Modify the function parameters FNDECL and its type according to the plan in
|
|
1220 ADJUSTMENTS. This function needs to be called when the decl has not already
|
|
1221 been processed with ipa_param_adjustments::adjust_decl, otherwise just
|
|
1222 seting DECL_ARGUMENTS to whatever get_new_param_chain will do is enough. */
|
131
|
1223
|
145
|
void
ipa_param_body_adjustments::modify_formal_parameters ()
{
  /* Capture the original type before any modification; it is the basis for
     building the adjusted function type below.  */
  tree orig_type = TREE_TYPE (m_fndecl);
  DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();

  /* When signature changes, we need to clear builtin info.  */
  if (fndecl_built_in_p (m_fndecl))
    set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);

  /* At this point, removing return value is only implemented when going
     through tree_function_versioning, not when modifying function body
     directly.  */
  gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
  tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
						m_method2func, false);

  TREE_TYPE (m_fndecl) = new_type;
  DECL_VIRTUAL_P (m_fndecl) = 0;
  DECL_LANG_SPECIFIC (m_fndecl) = NULL;
  /* When a method became a plain function it can no longer sit in a
     vtable slot.  */
  if (m_method2func)
    DECL_VINDEX (m_fndecl) = NULL_TREE;
}
|
|
1247
|
|
1248 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
|
|
1249 structures. */
|
|
1250
|
|
1251 ipa_param_body_replacement *
|
|
1252 ipa_param_body_adjustments::lookup_replacement_1 (tree base,
|
|
1253 unsigned unit_offset)
|
|
1254 {
|
|
1255 unsigned int len = m_replacements.length ();
|
|
1256 for (unsigned i = 0; i < len; i++)
|
|
1257 {
|
|
1258 ipa_param_body_replacement *pbr = &m_replacements[i];
|
131
|
1259
|
145
|
1260 if (pbr->base == base
|
|
1261 && (pbr->unit_offset == unit_offset))
|
|
1262 return pbr;
|
|
1263 }
|
|
1264 return NULL;
|
|
1265 }
|
131
|
1266
|
145
|
1267 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
|
|
1268 and return it, assuming it is known it does not hold value by reference or
|
|
1269 in reverse storage order. */
|
131
|
1270
|
145
|
1271 tree
|
|
1272 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
|
|
1273 {
|
|
1274 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
|
|
1275 if (!pbr)
|
|
1276 return NULL;
|
|
1277 return pbr->repl;
|
131
|
1278 }
|
|
1279
|
|
1280 /* If T is an SSA_NAME, return NULL if it is not a default def or
|
|
1281 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
|
|
1282 the base variable is always returned, regardless if it is a default
|
|
1283 def. Return T if it is not an SSA_NAME. */
|
|
1284
|
|
1285 static tree
|
|
1286 get_ssa_base_param (tree t, bool ignore_default_def)
|
|
1287 {
|
|
1288 if (TREE_CODE (t) == SSA_NAME)
|
|
1289 {
|
|
1290 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
|
|
1291 return SSA_NAME_VAR (t);
|
|
1292 else
|
|
1293 return NULL_TREE;
|
|
1294 }
|
|
1295 return t;
|
|
1296 }
|
|
1297
|
145
|
1298 /* Given an expression, return the structure describing how it should be
|
|
1299 replaced if it accesses a part of a split parameter or NULL otherwise.
|
131
|
1300
|
145
|
1301 Do not free the result, it will be deallocated when the object is destroyed.
|
131
|
1302
|
145
|
1303 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
|
|
1304 which are default definitions, if set, consider all SSA_NAMEs of
|
|
1305 PARM_DECLs. */
|
131
|
1306
|
145
|
1307 ipa_param_body_replacement *
|
|
1308 ipa_param_body_adjustments::get_expr_replacement (tree expr,
|
|
1309 bool ignore_default_def)
|
131
|
1310 {
|
145
|
1311 tree base;
|
|
1312 unsigned unit_offset;
|
131
|
1313
|
145
|
1314 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
|
131
|
1315 return NULL;
|
|
1316
|
|
1317 base = get_ssa_base_param (base, ignore_default_def);
|
|
1318 if (!base || TREE_CODE (base) != PARM_DECL)
|
|
1319 return NULL;
|
145
|
1320 return lookup_replacement_1 (base, unit_offset);
|
131
|
1321 }
|
|
1322
|
145
|
1323 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
|
|
1324 (which includes it being split or replaced), return a new variable that
|
|
1325 should be used for any SSA names that will remain in the function that
|
|
1326 previously belonged to OLD_DECL. */
|
131
|
1327
|
145
|
1328 tree
|
|
1329 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
|
131
|
1330 {
|
145
|
1331 unsigned *idx = m_removed_map.get (old_decl);
|
|
1332 if (!idx)
|
|
1333 return NULL;
|
131
|
1334
|
145
|
1335 tree repl;
|
|
1336 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
|
131
|
1337 {
|
145
|
1338 gcc_assert (m_removed_decls[*idx] == old_decl);
|
|
1339 repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
|
|
1340 TREE_TYPE (old_decl));
|
|
1341 m_removed_decls[*idx] = repl;
|
131
|
1342 }
|
|
1343 else
|
145
|
1344 repl = m_removed_decls[*idx];
|
|
1345 return repl;
|
|
1346 }
|
|
1347
|
|
1348 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
|
|
1349 parameter which is to be removed because its value is not used, create a new
|
|
1350 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
|
|
1351 original with it and return it. If there is no need to re-map, return NULL.
|
|
1352 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
|
|
1353
|
|
1354 tree
|
|
1355 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
|
|
1356 gimple *stmt)
|
|
1357 {
|
|
1358 gcc_assert (!m_id);
|
|
1359 if (TREE_CODE (old_name) != SSA_NAME)
|
|
1360 return NULL;
|
|
1361
|
|
1362 tree decl = SSA_NAME_VAR (old_name);
|
|
1363 if (decl == NULL_TREE
|
|
1364 || TREE_CODE (decl) != PARM_DECL)
|
|
1365 return NULL;
|
|
1366
|
|
1367 tree repl = get_replacement_ssa_base (decl);
|
|
1368 if (!repl)
|
|
1369 return NULL;
|
|
1370
|
|
1371 tree new_name = make_ssa_name (repl, stmt);
|
|
1372 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
|
|
1373 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
|
131
|
1374
|
|
1375 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
1376 {
|
145
|
1377 fprintf (dump_file, "replacing an SSA name of a removed param ");
|
|
1378 print_generic_expr (dump_file, old_name);
|
|
1379 fprintf (dump_file, " with ");
|
|
1380 print_generic_expr (dump_file, new_name);
|
|
1381 fprintf (dump_file, "\n");
|
|
1382 }
|
|
1383
|
|
1384 replace_uses_by (old_name, new_name);
|
|
1385 return new_name;
|
|
1386 }
|
|
1387
|
|
1388 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies
|
|
1389 whether the function should care about type incompatibility of the current
|
|
1390 and new expressions. If it is false, the function will leave
|
|
1391 incompatibility issues to the caller - note that when the function
|
|
1392 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
|
|
1393 their bases instead of the expressions themselves and then also performs any
|
|
1394 necessary conversions. */
|
|
1395
|
|
bool
ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert)
{
  tree expr = *expr_p;

  /* For these wrappers we substitute the base operand instead of the whole
     expression, and then a conversion is always required, so EXPR_P is
     redirected to the operand slot.  */
  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr_p = &TREE_OPERAND (expr, 0);
      expr = *expr_p;
      convert = true;
    }

  ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
  if (!pbr)
    return false;

  tree repl = pbr->repl;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, expr);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, repl);
      fprintf (dump_file, "\n");
    }

  /* Wrap the replacement in a VIEW_CONVERT_EXPR when the caller asked for
     conversion and the types are not trivially compatible.  */
  if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
					     TREE_TYPE (repl)))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
      *expr_p = vce;
    }
  else
    *expr_p = repl;
  return true;
}
|
|
1434
|
145
|
1435 /* If the assignment statement STMT contains any expressions that need to
|
|
1436 replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
|
|
1437 potential type incompatibilities. If any conversion statements have to be
|
|
1438 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the
|
|
1439 statement was modified. */
|
|
1440
|
|
bool
ipa_param_body_adjustments::modify_assignment (gimple *stmt,
					       gimple_seq *extra_stmts)
{
  tree *lhs_p, *rhs_p;
  bool any;

  /* Only single-rhs assignments can contain replaceable references.  */
  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  /* Substitute both sides without conversion first; fix up type mismatches
     between the new operands below.  */
  any = modify_expression (lhs_p, false);
  any |= modify_expression (rhs_p, false);
  if (any
      && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
    {
      if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	{
	  /* V_C_Es of constructors can cause trouble (PR 42714).  */
	  if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
	    *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	  else
	    *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
					NULL);
	}
      else
	{
	  /* Otherwise view-convert the RHS to the LHS type, gimplifying the
	     result into EXTRA_STMTS which the caller must insert before
	     STMT.  */
	  tree new_rhs = fold_build1_loc (gimple_location (stmt),
					  VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
					  *rhs_p);
	  tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
					   NULL_TREE);
	  gimple_assign_set_rhs1 (stmt, tmp);
	}
      return true;
    }

  return any;
}
|
|
1482
|
|
1483 /* Data passed to remap_split_decl_to_dummy through walk_tree. */
|
|
1484
|
|
struct simple_tree_swap_info
{
  /* Change FROM to TO.  */
  tree from, to;
  /* And set DONE to true when doing so, so the caller can assert that the
     substitution actually took place.  */
  bool done;
};
|
|
1492
|
|
1493 /* Simple remapper to remap a split parameter to the same expression based on a
|
|
1494 special dummy decl so that edge redirections can detect transitive splitting
|
|
1495 and finish them. */
|
|
1496
|
|
1497 static tree
|
|
1498 remap_split_decl_to_dummy (tree *tp, int *walk_subtrees, void *data)
|
|
1499 {
|
|
1500 tree t = *tp;
|
|
1501
|
|
1502 if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
|
|
1503 {
|
|
1504 struct simple_tree_swap_info *swapinfo
|
|
1505 = (struct simple_tree_swap_info *) data;
|
|
1506 if (t == swapinfo->from
|
|
1507 || (TREE_CODE (t) == SSA_NAME
|
|
1508 && SSA_NAME_VAR (t) == swapinfo->from))
|
|
1509 {
|
|
1510 *tp = swapinfo->to;
|
|
1511 swapinfo->done = true;
|
|
1512 }
|
|
1513 *walk_subtrees = 0;
|
|
1514 }
|
|
1515 else if (TYPE_P (t))
|
|
1516 *walk_subtrees = 0;
|
|
1517 else
|
|
1518 *walk_subtrees = 1;
|
|
1519 return NULL_TREE;
|
|
1520 }
|
|
1521
|
|
1522
|
|
1523 /* If the call statement pointed at by STMT_P contains any expressions that
|
|
1524 need to be replaced with a different one as noted by ADJUSTMENTS, do so. If the
|
|
1525 statement needs to be rebuilt, do so. Return true if any modifications have
|
|
1526 been performed.
|
|
1527
|
|
1528 If the method is invoked as a part of IPA clone materialization and if any
|
|
1529 parameter split is transitive, i.e. it applies to the function that is being
|
|
1530 modified and also to the callee of the statement, replace the parameter
|
|
1531 passed to old callee with an equivalent expression based on a dummy decl
|
|
1532 followed by PARM_DECLs representing the actual replacements. The actual
|
|
1533 replacements will be then converted into SSA_NAMEs and then
|
|
1534 ipa_param_adjustments::modify_call will find the appropriate ones and leave
|
|
1535 only those in the call. */
|
|
1536
|
|
bool
ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p)
{
  gcall *stmt = *stmt_p;
  /* Indices of arguments that are pass-throughs of split parameters, and
     for each of them the index of the first matching replacement record.  */
  auto_vec <unsigned, 4> pass_through_args;
  auto_vec <unsigned, 4> pass_through_pbr_indices;

  /* Transitive splits can only occur during clone materialization (m_id)
     and only when this function itself has split parameters.  */
  if (m_split_modifications_p && m_id)
    {
      for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  tree t = gimple_call_arg (stmt, i);
	  gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
		      && TREE_CODE (t) != IMAGPART_EXPR
		      && TREE_CODE (t) != REALPART_EXPR);

	  tree base;
	  unsigned unit_offset;
	  if (!isra_get_ref_base_and_offset (t, &base, &unit_offset))
	    continue;

	  /* A default-def SSA name means the value is passed around by
	     reference.  */
	  bool by_ref = false;
	  if (TREE_CODE (base) == SSA_NAME)
	    {
	      if (!SSA_NAME_IS_DEFAULT_DEF (base))
		continue;
	      base = SSA_NAME_VAR (base);
	      gcc_checking_assert (base);
	      by_ref = true;
	    }
	  if (TREE_CODE (base) != PARM_DECL)
	    continue;

	  /* Find the first replacement record for this parameter, if any.  */
	  bool base_among_replacements = false;
	  unsigned j, repl_list_len = m_replacements.length ();
	  for (j = 0; j < repl_list_len; j++)
	    {
	      ipa_param_body_replacement *pbr = &m_replacements[j];
	      if (pbr->base == base)
		{
		  base_among_replacements = true;
		  break;
		}
	    }
	  if (!base_among_replacements)
	    continue;

	  /* We still have to distinguish between an end-use that we have to
	     transform now and a pass-through, which happens in the following
	     two cases.  */

	  /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
	     &MEM_REF[ssa_name + offset], we will also have to detect that case
	     here.  */

	  if (TREE_CODE (t) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (t)
	      && SSA_NAME_VAR (t)
	      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
	    {
	      /* This must be a by_reference pass-through.  */
	      gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
	      pass_through_args.safe_push (i);
	      pass_through_pbr_indices.safe_push (j);
	    }
	  else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
	    {
	      /* Currently IPA-SRA guarantees the aggregate access type
		 exactly matches in this case.  So if it does not match, it is
		 a pass-through argument that will be sorted out at edge
		 redirection time.  */
	      ipa_param_body_replacement *pbr
		= lookup_replacement_1 (base, unit_offset);

	      if (!pbr
		  || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
		      != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
		{
		  pass_through_args.safe_push (i);
		  pass_through_pbr_indices.safe_push (j);
		}
	    }
	}
    }

  unsigned nargs = gimple_call_num_args (stmt);
  if (!pass_through_args.is_empty ())
    {
      /* At least one argument is a transitive pass-through: rebuild the
	 whole call.  */
      auto_vec<tree, 16> vargs;
      unsigned pt_idx = 0;
      for (unsigned i = 0; i < nargs; i++)
	{
	  if (pt_idx < pass_through_args.length ()
	      && i == pass_through_args[pt_idx])
	    {
	      unsigned j = pass_through_pbr_indices[pt_idx];
	      pt_idx++;
	      tree base = m_replacements[j].base;

	      /* Map base will get mapped to the special transitive-isra marker
		 dummy decl.  */
	      struct simple_tree_swap_info swapinfo;
	      swapinfo.from = base;
	      swapinfo.to = m_replacements[j].dummy;
	      swapinfo.done = false;
	      tree arg = gimple_call_arg (stmt, i);
	      walk_tree (&arg, remap_split_decl_to_dummy, &swapinfo, NULL);
	      gcc_assert (swapinfo.done);
	      vargs.safe_push (arg);
	      /* Now let's push all replacements pertaining to this parameter
		 so that all gimple register ones get correct SSA_NAMES.  Edge
		 redirection will weed out the dummy argument as well as all
		 unused replacements later.  */
	      unsigned int repl_list_len = m_replacements.length ();
	      for (; j < repl_list_len; j++)
		{
		  if (m_replacements[j].base != base)
		    break;
		  vargs.safe_push (m_replacements[j].repl);
		}
	    }
	  else
	    {
	      /* An ordinary argument; just substitute replacements in it.  */
	      tree t = gimple_call_arg (stmt, i);
	      modify_expression (&t, true);
	      vargs.safe_push (t);
	    }
	}
      gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
      gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
      gimple_call_copy_flags (new_stmt, stmt);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  modify_expression (&lhs, false);
	  gimple_call_set_lhs (new_stmt, lhs);
	}
      *stmt_p = new_stmt;
      return true;
    }

  /* Otherwise, no need to rebuild the statement, let's just modify arguments
     and the LHS if/as appropriate.  */
  bool modified = false;
  for (unsigned i = 0; i < nargs; i++)
    {
      tree *t = gimple_call_arg_ptr (stmt, i);
      modified |= modify_expression (t, true);
    }

  if (gimple_call_lhs (stmt))
    {
      tree *t = gimple_call_lhs_ptr (stmt);
      modified |= modify_expression (t, false);
    }

  return modified;
}
|
|
1694
|
|
1695 /* If the statement STMT contains any expressions that need to replaced with a
|
|
1696 different one as noted by ADJUSTMENTS, do so. Handle any potential type
|
|
1697 incompatibilities. If any conversion statements have to be pre-pended to
|
|
1698 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was
|
|
1699 modified. */
|
|
1700
|
|
bool
ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
						gimple_seq *extra_stmts)
{
  bool modified = false;
  tree *t;

  /* Dispatch on the statement kind; only statements that can contain
     parameter references need handling.  */
  switch (gimple_code (*stmt))
    {
    case GIMPLE_RETURN:
      t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
      /* When the return value is being removed, drop the returned
	 expression entirely.  */
      if (m_adjustments && m_adjustments->m_skip_return)
	*t = NULL_TREE;
      else if (*t != NULL_TREE)
	modified |= modify_expression (t, true);
      break;

    case GIMPLE_ASSIGN:
      modified |= modify_assignment (*stmt, extra_stmts);
      break;

    case GIMPLE_CALL:
      /* modify_call_stmt may rebuild the call, replacing *stmt.  */
      modified |= modify_call_stmt ((gcall **) stmt);
      break;

    case GIMPLE_ASM:
      {
	/* Inputs may need conversion, outputs must not be converted.  */
	gasm *asm_stmt = as_a <gasm *> (*stmt);
	for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
	  {
	    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
	    modified |= modify_expression (t, true);
	  }
	for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
	  {
	    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
	    modified |= modify_expression (t, false);
	  }
      }
      break;

    default:
      break;
    }
  return modified;
}
|
|
1747
|
|
1748
|
|
/* Traverse body of the current function and perform the requested adjustments
   on its statements.  Return true iff the CFG has been changed.  */

bool
ipa_param_body_adjustments::modify_cfun_body ()
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      /* First give PHI results new SSA names where
	 replace_removed_params_ssa_names requests it, releasing the old
	 names.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
	  tree new_lhs, old_lhs = gimple_phi_result (phi);
	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
	  if (new_lhs)
	    {
	      gimple_phi_set_result (phi, new_lhs);
	      release_ssa_name (old_lhs);
	    }
	}

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple *stmt_copy = stmt;
	  gimple_seq extra_stmts = NULL;
	  bool modified = modify_gimple_stmt (&stmt, &extra_stmts);
	  /* modify_gimple_stmt may have replaced the statement with a newly
	     built one (STMT is passed by reference); if so, swap it into the
	     iterator's slot.  */
	  if (stmt != stmt_copy)
	    {
	      gcc_checking_assert (modified);
	      gsi_replace (&gsi, stmt, false);
	    }
	  /* Any conversion statements produced for STMT must execute before
	     it.  */
	  if (!gimple_seq_empty_p (extra_stmts))
	    gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);

	  /* Rename any SSA definitions of the (possibly replaced) statement
	     that replace_removed_params_ssa_names wants changed.  */
	  def_operand_p defp;
	  ssa_op_iter iter;
	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	    {
	      tree old_def = DEF_FROM_PTR (defp);
	      if (tree new_def = replace_removed_params_ssa_names (old_def,
								   stmt))
		{
		  SET_DEF (defp, new_def);
		  release_ssa_name (old_def);
		  modified = true;
		}
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      /* A modified statement may no longer throw; purging the now
	         dead EH edges is a CFG change we must report.  */
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
|
|
1816
|
|
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

void
ipa_param_body_adjustments::reset_debug_stmts ()
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  /* New debug statements are inserted after the labels of the single
     successor of the entry block, if there is one; otherwise GSIP stays NULL
     and no new statements are created.  */
  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = m_reset_debug_decls.length ();
  for (i = 0; i < len; i++)
    {
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;
      tree decl = m_reset_debug_decls[i];

      gcc_checking_assert (is_gimple_reg (decl));
      /* Walk all remaining uses of the decl's default-definition SSA name;
	 only clobbers and debug statements are expected at this point.  */
      name = ssa_default_def (cfun, decl);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    /* Clobbers of the removed parameter are simply deleted.  */
	    if (gimple_clobber_p (stmt))
	      {
		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		gsi_remove (&cgsi, true);
		release_defs (stmt);
		continue;
	      }
	    /* All other users must have been removed by function body
	       modification.  */
	    gcc_assert (is_gimple_debug (stmt));
	    /* Lazily build a single DEBUG_EXPR_DECL bound to the original
	       decl by a source bind, to stand in for the vanished value.  */
	    if (vexpr == NULL && gsip != NULL)
	      {
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		SET_DECL_MODE (vexpr, DECL_MODE (decl));
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    /* Redirect the debug use to the debug expression, or reset its
	       value if none could be created.  */
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (decl))
	{
	  /* The copy mirrors the original decl's relevant flags and is made
	     visible in the function's local decls and outermost block.  */
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (decl),
			     TREE_TYPE (decl));
	  if (DECL_PT_UID_SET_P (decl))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy)
	    = BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      /* Bind the new variable either to the debug expression created above
	 or, failing that, directly to the original parameter decl.  */
      if (gsip != NULL && copy && target_for_debug_bind (decl))
	{
	  gcc_assert (TREE_CODE (decl) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, decl,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
|
|
1912
|
145
|
1913 /* Perform all necessary body changes to change signature, body and debug info
|
|
1914 of fun according to adjustments passed at construction. Return true if CFG
|
|
1915 was changed in any way. The main entry point for modification of standalone
|
|
1916 functions that is not part of IPA clone materialization. */
|
|
1917
|
|
1918 bool
|
|
1919 ipa_param_body_adjustments::perform_cfun_body_modifications ()
|
|
1920 {
|
|
1921 bool cfg_changed;
|
|
1922 modify_formal_parameters ();
|
|
1923 cfg_changed = modify_cfun_body ();
|
|
1924 reset_debug_stmts ();
|
|
1925
|
|
1926 return cfg_changed;
|
|
1927 }
|
|
1928
|