Mercurial > hg > CbC > CbC_gcc
comparison gcc/ipa-param-manipulation.c @ 146:351920fa3827
merge
author | anatofuz <anatofuz@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Sun, 01 Mar 2020 16:13:28 +0900 |
parents | 1830386684a0 |
children |
comparison
equal
deleted
inserted
replaced
144:8f4e72ab4e11 | 146:351920fa3827 |
---|---|
1 /* Manipulation of formal and actual parameters of functions and function | 1 /* Manipulation of formal and actual parameters of functions and function |
2 calls. | 2 calls. |
3 Copyright (C) 2017-2018 Free Software Foundation, Inc. | 3 Copyright (C) 2017-2020 Free Software Foundation, Inc. |
4 | 4 |
5 This file is part of GCC. | 5 This file is part of GCC. |
6 | 6 |
7 GCC is free software; you can redistribute it and/or modify it under | 7 GCC is free software; you can redistribute it and/or modify it under |
8 the terms of the GNU General Public License as published by the Free | 8 the terms of the GNU General Public License as published by the Free |
20 | 20 |
21 #include "config.h" | 21 #include "config.h" |
22 #include "system.h" | 22 #include "system.h" |
23 #include "coretypes.h" | 23 #include "coretypes.h" |
24 #include "backend.h" | 24 #include "backend.h" |
25 #include "rtl.h" | |
26 #include "tree.h" | 25 #include "tree.h" |
27 #include "gimple.h" | 26 #include "gimple.h" |
28 #include "ssa.h" | 27 #include "ssa.h" |
29 #include "cgraph.h" | 28 #include "cgraph.h" |
30 #include "fold-const.h" | 29 #include "fold-const.h" |
30 #include "tree-eh.h" | |
31 #include "stor-layout.h" | 31 #include "stor-layout.h" |
32 #include "gimplify.h" | 32 #include "gimplify.h" |
33 #include "gimple-iterator.h" | 33 #include "gimple-iterator.h" |
34 #include "gimplify-me.h" | 34 #include "gimplify-me.h" |
35 #include "tree-cfg.h" | |
35 #include "tree-dfa.h" | 36 #include "tree-dfa.h" |
36 #include "ipa-param-manipulation.h" | 37 #include "ipa-param-manipulation.h" |
37 #include "print-tree.h" | 38 #include "print-tree.h" |
38 #include "gimple-pretty-print.h" | 39 #include "gimple-pretty-print.h" |
39 #include "builtins.h" | 40 #include "builtins.h" |
40 | 41 #include "tree-ssa.h" |
41 /* Return a heap allocated vector containing formal parameters of FNDECL. */ | 42 #include "tree-inline.h" |
42 | 43 |
43 vec<tree> | 44 |
44 ipa_get_vector_of_formal_parms (tree fndecl) | 45 /* Actual prefixes of different newly synthesized parameters. Keep in sync |
45 { | 46 with IPA_PARAM_PREFIX_* defines. */ |
46 vec<tree> args; | 47 |
48 static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT] | |
49 = {"SYNTH", | |
50 "ISRA", | |
51 "simd", | |
52 "mask"}; | |
53 | |
54 /* Names of parameters for dumping. Keep in sync with enum ipa_parm_op. */ | |
55 | |
56 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT] | |
57 = {"IPA_PARAM_OP_UNDEFINED", | |
58 "IPA_PARAM_OP_COPY", | |
59 "IPA_PARAM_OP_NEW", | |
60 "IPA_PARAM_OP_SPLIT"}; | |
61 | |
62 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of | |
63 FNDECL. The function should not be called during LTO WPA phase except for | |
64 thunks (or functions with bodies streamed in). */ | |
65 | |
66 void | |
67 push_function_arg_decls (vec<tree> *args, tree fndecl) | |
68 { | |
47 int count; | 69 int count; |
48 tree parm; | 70 tree parm; |
49 | 71 |
50 gcc_assert (!flag_wpa); | 72 /* Safety check that we do not attempt to use the function in WPA, except |
73 when the function is a thunk and then we have DECL_ARGUMENTS or when we | |
74 have already explicitly loaded its body. |
75 gcc_assert (!flag_wpa | |
76 || DECL_ARGUMENTS (fndecl) | |
77 || gimple_has_body_p (fndecl)); | |
51 count = 0; | 78 count = 0; |
52 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) | 79 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
53 count++; | 80 count++; |
54 | 81 |
55 args.create (count); | 82 args->reserve_exact (count); |
56 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) | 83 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm)) |
57 args.quick_push (parm); | 84 args->quick_push (parm); |
58 | 85 } |
59 return args; | 86 |
60 } | 87 /* Fill an empty vector TYPES with trees representing formal parameters of |
61 | |
62 /* Return a heap allocated vector containing types of formal parameters of | |
63 function type FNTYPE. */ | 88 function type FNTYPE. */ |
64 | 89 |
65 vec<tree> | 90 void |
66 ipa_get_vector_of_formal_parm_types (tree fntype) | 91 push_function_arg_types (vec<tree> *types, tree fntype) |
67 { | 92 { |
68 vec<tree> types; | |
69 int count = 0; | 93 int count = 0; |
70 tree t; | 94 tree t; |
71 | 95 |
72 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) | 96 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) |
73 count++; | 97 count++; |
74 | 98 |
75 types.create (count); | 99 types->reserve_exact (count); |
76 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) | 100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t)) |
77 types.quick_push (TREE_VALUE (t)); | 101 types->quick_push (TREE_VALUE (t)); |
78 | 102 } |
79 return types; | 103 |
80 } | 104 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human |
81 | 105 friendly way, assuming they are meant to be applied to FNDECL. */ |
82 /* Modify the function declaration FNDECL and its type according to the plan in | |
83 ADJUSTMENTS. It also sets base fields of individual adjustments structures | |
84 to reflect the actual parameters being modified which are determined by the | |
85 base_index field. */ | |
86 | 106 |
87 void | 107 void |
88 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments) | 108 ipa_dump_adjusted_parameters (FILE *f, |
89 { | 109 vec<ipa_adjusted_param, va_gc> *adj_params) |
90 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl); | 110 { |
91 tree orig_type = TREE_TYPE (fndecl); | 111 unsigned i, len = vec_safe_length (adj_params); |
92 tree old_arg_types = TYPE_ARG_TYPES (orig_type); | 112 bool first = true; |
93 | 113 |
94 /* The following test is an ugly hack, some functions simply don't have any | 114 fprintf (f, " IPA adjusted parameters: "); |
95 arguments in their type. This is probably a bug but well... */ | 115 for (i = 0; i < len; i++) |
96 bool care_for_types = (old_arg_types != NULL_TREE); | 116 { |
97 bool last_parm_void; | 117 struct ipa_adjusted_param *apm; |
98 vec<tree> otypes; | 118 apm = &(*adj_params)[i]; |
99 if (care_for_types) | 119 |
100 { | 120 if (!first) |
101 last_parm_void = (TREE_VALUE (tree_last (old_arg_types)) | 121 fprintf (f, " "); |
102 == void_type_node); | |
103 otypes = ipa_get_vector_of_formal_parm_types (orig_type); | |
104 if (last_parm_void) | |
105 gcc_assert (oparms.length () + 1 == otypes.length ()); | |
106 else | 122 else |
107 gcc_assert (oparms.length () == otypes.length ()); | 123 first = false; |
108 } | 124 |
109 else | 125 fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op], |
110 { | 126 apm->prev_clone_adjustment ? "prev_clone_adjustment " : ""); |
111 last_parm_void = false; | 127 switch (apm->op) |
112 otypes.create (0); | 128 { |
113 } | 129 case IPA_PARAM_OP_UNDEFINED: |
114 | 130 break; |
115 int len = adjustments.length (); | 131 |
116 tree *link = &DECL_ARGUMENTS (fndecl); | 132 case IPA_PARAM_OP_COPY: |
133 fprintf (f, ", base_index: %u", apm->base_index); | |
134 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index); | |
135 break; | |
136 | |
137 case IPA_PARAM_OP_SPLIT: | |
138 fprintf (f, ", offset: %u", apm->unit_offset); | |
139 /* fall-through */ | |
140 case IPA_PARAM_OP_NEW: | |
141 fprintf (f, ", base_index: %u", apm->base_index); | |
142 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index); | |
143 print_node_brief (f, ", type: ", apm->type, 0); | |
144 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0); | |
145 fprintf (f, " prefix: %s", | |
146 ipa_param_prefixes[apm->param_prefix_index]); | |
147 if (apm->reverse) | |
148 fprintf (f, ", reverse-sso"); | |
149 break; | |
150 } | |
151 fprintf (f, "\n"); | |
152 } | |
153 } | |
154 | |
155 /* Fill NEW_TYPES with types of a function after its current OTYPES have been | |
156 modified as described in ADJ_PARAMS. When USE_PREV_INDICES is true, use | |
157 prev_clone_index from ADJ_PARAMS as opposed to base_index when the parameter | |
158 is false. */ | |
159 | |
160 static void | |
161 fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes, | |
162 vec<ipa_adjusted_param, va_gc> *adj_params, | |
163 bool use_prev_indices) | |
164 { | |
165 unsigned adj_len = vec_safe_length (adj_params); | |
166 new_types->reserve_exact (adj_len); | |
167 for (unsigned i = 0; i < adj_len ; i++) | |
168 { | |
169 ipa_adjusted_param *apm = &(*adj_params)[i]; | |
170 if (apm->op == IPA_PARAM_OP_COPY) | |
171 { | |
172 unsigned index | |
173 = use_prev_indices ? apm->prev_clone_index : apm->base_index; | |
174 /* The following needs to be handled gracefully because of type | |
175 mismatches. This happens with LTO but apparently also in Fortran | |
176 with -fcoarray=lib -O2 -lcaf_single -latomic. */ | |
177 if (index >= otypes->length ()) | |
178 continue; | |
179 new_types->quick_push ((*otypes)[index]); | |
180 } | |
181 else if (apm->op == IPA_PARAM_OP_NEW | |
182 || apm->op == IPA_PARAM_OP_SPLIT) | |
183 { | |
184 tree ntype = apm->type; | |
185 if (is_gimple_reg_type (ntype) | |
186 && TYPE_MODE (ntype) != BLKmode) | |
187 { | |
188 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype)); | |
189 if (TYPE_ALIGN (ntype) != malign) | |
190 ntype = build_aligned_type (ntype, malign); | |
191 } | |
192 new_types->quick_push (ntype); | |
193 } | |
194 else | |
195 gcc_unreachable (); | |
196 } | |
197 } | |
198 | |
199 /* Build and return a function type just like ORIG_TYPE but with parameter | |
200 types given in NEW_PARAM_TYPES - which can be NULL if, but only if, | |
201 ORIG_TYPE itself has NULL TYPE_ARG_TYPEs. If METHOD2FUNC is true, also make |
202 it a FUNCTION_TYPE instead of a METHOD_TYPE. */ |
203 | |
204 static tree | |
205 build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types, | |
206 bool method2func, bool skip_return) | |
207 { | |
117 tree new_arg_types = NULL; | 208 tree new_arg_types = NULL; |
118 for (int i = 0; i < len; i++) | 209 if (TYPE_ARG_TYPES (orig_type)) |
119 { | 210 { |
120 struct ipa_parm_adjustment *adj; | 211 gcc_checking_assert (new_param_types); |
121 gcc_assert (link); | 212 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type))) |
122 | 213 == void_type_node); |
123 adj = &adjustments[i]; | 214 unsigned len = new_param_types->length (); |
124 tree parm; | 215 for (unsigned i = 0; i < len; i++) |
125 if (adj->op == IPA_PARM_OP_NEW) | 216 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i], |
126 parm = NULL; | 217 new_arg_types); |
127 else | 218 |
128 parm = oparms[adj->base_index]; | 219 tree new_reversed = nreverse (new_arg_types); |
129 adj->base = parm; | |
130 | |
131 if (adj->op == IPA_PARM_OP_COPY) | |
132 { | |
133 if (care_for_types) | |
134 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index], | |
135 new_arg_types); | |
136 *link = parm; | |
137 link = &DECL_CHAIN (parm); | |
138 } | |
139 else if (adj->op != IPA_PARM_OP_REMOVE) | |
140 { | |
141 tree new_parm; | |
142 tree ptype; | |
143 | |
144 if (adj->by_ref) | |
145 ptype = build_pointer_type (adj->type); | |
146 else | |
147 { | |
148 ptype = adj->type; | |
149 if (is_gimple_reg_type (ptype) | |
150 && TYPE_MODE (ptype) != BLKmode) | |
151 { | |
152 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype)); | |
153 if (TYPE_ALIGN (ptype) != malign) | |
154 ptype = build_aligned_type (ptype, malign); | |
155 } | |
156 } | |
157 | |
158 if (care_for_types) | |
159 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types); | |
160 | |
161 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE, | |
162 ptype); | |
163 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH"; | |
164 DECL_NAME (new_parm) = create_tmp_var_name (prefix); | |
165 DECL_ARTIFICIAL (new_parm) = 1; | |
166 DECL_ARG_TYPE (new_parm) = ptype; | |
167 DECL_CONTEXT (new_parm) = fndecl; | |
168 TREE_USED (new_parm) = 1; | |
169 DECL_IGNORED_P (new_parm) = 1; | |
170 layout_decl (new_parm, 0); | |
171 | |
172 if (adj->op == IPA_PARM_OP_NEW) | |
173 adj->base = NULL; | |
174 else | |
175 adj->base = parm; | |
176 adj->new_decl = new_parm; | |
177 | |
178 *link = new_parm; | |
179 link = &DECL_CHAIN (new_parm); | |
180 } | |
181 } | |
182 | |
183 *link = NULL_TREE; | |
184 | |
185 tree new_reversed = NULL; | |
186 if (care_for_types) | |
187 { | |
188 new_reversed = nreverse (new_arg_types); | |
189 if (last_parm_void) | 220 if (last_parm_void) |
190 { | 221 { |
191 if (new_reversed) | 222 if (new_reversed) |
192 TREE_CHAIN (new_arg_types) = void_list_node; | 223 TREE_CHAIN (new_arg_types) = void_list_node; |
193 else | 224 else |
194 new_reversed = void_list_node; | 225 new_reversed = void_list_node; |
195 } | 226 } |
196 } | 227 new_arg_types = new_reversed; |
197 | 228 } |
198 /* Use copy_node to preserve as much as possible from original type | 229 |
199 (debug info, attribute lists etc.) | 230 /* Use build_distinct_type_copy to preserve as much as possible from original |
200 Exception is METHOD_TYPEs must have THIS argument. | 231 type (debug info, attribute lists etc.). The one exception is |
201 When we are asked to remove it, we need to build new FUNCTION_TYPE | 232 METHOD_TYPEs which must have THIS argument and when we are asked to remove |
202 instead. */ | 233 it, we need to build new FUNCTION_TYPE instead. */ |
203 tree new_type = NULL; | 234 tree new_type = NULL; |
204 if (TREE_CODE (orig_type) != METHOD_TYPE | 235 if (method2func) |
205 || (adjustments[0].op == IPA_PARM_OP_COPY | 236 { |
206 && adjustments[0].base_index == 0)) | 237 tree ret_type; |
238 if (skip_return) | |
239 ret_type = void_type_node; | |
240 else | |
241 ret_type = TREE_TYPE (orig_type); | |
242 | |
243 new_type | |
244 = build_distinct_type_copy (build_function_type (ret_type, | |
245 new_arg_types)); | |
246 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type); | |
247 } | |
248 else | |
207 { | 249 { |
208 new_type = build_distinct_type_copy (orig_type); | 250 new_type = build_distinct_type_copy (orig_type); |
209 TYPE_ARG_TYPES (new_type) = new_reversed; | 251 TYPE_ARG_TYPES (new_type) = new_arg_types; |
252 if (skip_return) | |
253 TREE_TYPE (new_type) = void_type_node; | |
254 } | |
255 | |
256 return new_type; | |
257 } | |
258 | |
259 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there | |
260 is none. */ | |
261 | |
262 int | |
263 ipa_param_adjustments::get_max_base_index () | |
264 { | |
265 unsigned adj_len = vec_safe_length (m_adj_params); | |
266 int max_index = -1; | |
267 for (unsigned i = 0; i < adj_len ; i++) | |
268 { | |
269 ipa_adjusted_param *apm = &(*m_adj_params)[i]; | |
270 if (apm->op == IPA_PARAM_OP_COPY | |
271 && max_index < apm->base_index) | |
272 max_index = apm->base_index; | |
273 } | |
274 return max_index; | |
275 } | |
276 | |
277 | |
278 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a | |
279 parameter that originally was at that position still survives in the given | |
280 clone or is removed/replaced. If the final array is smaller than an index | |
281 of an original parameter, that parameter also did not survive. That a | |
282 parameter survives does not mean it has the same index as before. */ | |
283 | |
284 void | |
285 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params) | |
286 { | |
287 unsigned adj_len = vec_safe_length (m_adj_params); | |
288 int max_index = get_max_base_index (); | |
289 | |
290 if (max_index < 0) | |
291 return; | |
292 surviving_params->reserve_exact (max_index + 1); | |
293 surviving_params->quick_grow_cleared (max_index + 1); | |
294 for (unsigned i = 0; i < adj_len ; i++) | |
295 { | |
296 ipa_adjusted_param *apm = &(*m_adj_params)[i]; | |
297 if (apm->op == IPA_PARAM_OP_COPY) | |
298 (*surviving_params)[apm->base_index] = true; | |
299 } | |
300 } | |
301 | |
302 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for | |
303 those which do not survive. Any parameter outside of length of the vector |
304 does not survive. There is currently no support for a parameter to be | |
305 copied to two distinct new parameters. */ | |
306 | |
307 void | |
308 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices) | |
309 { | |
310 unsigned adj_len = vec_safe_length (m_adj_params); | |
311 int max_index = get_max_base_index (); | |
312 | |
313 if (max_index < 0) | |
314 return; | |
315 unsigned res_len = max_index + 1; | |
316 new_indices->reserve_exact (res_len); | |
317 for (unsigned i = 0; i < res_len ; i++) | |
318 new_indices->quick_push (-1); | |
319 for (unsigned i = 0; i < adj_len ; i++) | |
320 { | |
321 ipa_adjusted_param *apm = &(*m_adj_params)[i]; | |
322 if (apm->op == IPA_PARAM_OP_COPY) | |
323 (*new_indices)[apm->base_index] = i; | |
324 } | |
325 } | |
326 | |
327 /* Return the original index for the given new parameter index. Return a | |
328 negative number if not available. */ | |
329 | |
330 int | |
331 ipa_param_adjustments::get_original_index (int newidx) | |
332 { | |
333 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx]; | |
334 if (adj->op != IPA_PARAM_OP_COPY) | |
335 return -1; | |
336 return adj->base_index; | |
337 } | |
338 | |
339 /* Return true if the first parameter (assuming there was one) survives the | |
340 transformation intact and remains the first one. */ | |
341 | |
342 bool | |
343 ipa_param_adjustments::first_param_intact_p () | |
344 { | |
345 return (!vec_safe_is_empty (m_adj_params) | |
346 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY | |
347 && (*m_adj_params)[0].base_index == 0); | |
348 } | |
349 | |
350 /* Return true if we have to change what has formerly been a method into a | |
351 function. */ | |
352 | |
353 bool | |
354 ipa_param_adjustments::method2func_p (tree orig_type) | |
355 { | |
356 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ()); | |
357 } | |
358 | |
359 /* Given function type OLD_TYPE, return a new type derived from it after | |
360 performing all stored modifications. TYPE_ORIGINAL_P should be true when |
361 OLD_TYPE refers to the type before any IPA transformations, as opposed to a | |
362 type that can be an intermediate one in between various IPA | |
363 transformations. */ | |
364 | |
365 tree | |
366 ipa_param_adjustments::build_new_function_type (tree old_type, | |
367 bool type_original_p) | |
368 { | |
369 auto_vec<tree,16> new_param_types, *new_param_types_p; | |
370 if (prototype_p (old_type)) | |
371 { | |
372 auto_vec<tree, 16> otypes; | |
373 push_function_arg_types (&otypes, old_type); | |
374 fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params, | |
375 !type_original_p); | |
376 new_param_types_p = &new_param_types; | |
210 } | 377 } |
211 else | 378 else |
212 { | 379 new_param_types_p = NULL; |
213 new_type | 380 |
214 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type), | 381 return build_adjusted_function_type (old_type, new_param_types_p, |
215 new_reversed)); | 382 method2func_p (old_type), m_skip_return); |
216 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type); | 383 } |
217 DECL_VINDEX (fndecl) = NULL_TREE; | 384 |
218 } | 385 /* Build variant of function decl ORIG_DECL which has no return value if |
386 M_SKIP_RETURN is true and, if ORIG_DECL's types or parameters is known, has | |
387 this type adjusted as indicated in M_ADJ_PARAMS. Arguments from | |
388 DECL_ARGUMENTS list are not processed now, since they are linked by | |
389 TREE_CHAIN directly and not accessible in LTO during WPA. The caller is | |
390 responsible for eliminating them when clones are properly materialized. */ | |
391 | |
392 tree | |
393 ipa_param_adjustments::adjust_decl (tree orig_decl) | |
394 { | |
395 tree new_decl = copy_node (orig_decl); | |
396 tree orig_type = TREE_TYPE (orig_decl); | |
397 if (prototype_p (orig_type) | |
398 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type)))) | |
399 { | |
400 tree new_type = build_new_function_type (orig_type, false); | |
401 TREE_TYPE (new_decl) = new_type; | |
402 } | |
403 if (method2func_p (orig_type)) | |
404 DECL_VINDEX (new_decl) = NULL_TREE; | |
219 | 405 |
220 /* When signature changes, we need to clear builtin info. */ | 406 /* When signature changes, we need to clear builtin info. */ |
221 if (fndecl_built_in_p (fndecl)) | 407 if (fndecl_built_in_p (new_decl)) |
222 { | 408 set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0); |
223 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN; | 409 |
224 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0; | 410 DECL_VIRTUAL_P (new_decl) = 0; |
225 } | 411 DECL_LANG_SPECIFIC (new_decl) = NULL; |
226 | 412 |
227 TREE_TYPE (fndecl) = new_type; | 413 return new_decl; |
228 DECL_VIRTUAL_P (fndecl) = 0; | 414 } |
229 DECL_LANG_SPECIFIC (fndecl) = NULL; | 415 |
230 otypes.release (); | 416 /* Wrapper around get_base_ref_and_offset for cases interesting for IPA-SRA |
231 oparms.release (); | 417 transformations. Return true if EXPR has an interesting form and fill in |
232 } | 418 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */ |
233 | 419 |
234 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS. | 420 static bool |
235 If this is a directly recursive call, CS must be NULL. Otherwise it must | 421 isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p) |
236 contain the corresponding call graph edge. */ | 422 { |
237 | 423 HOST_WIDE_INT offset, size; |
238 void | 424 bool reverse; |
239 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt, | 425 tree base |
240 ipa_parm_adjustment_vec adjustments) | 426 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse); |
241 { | 427 if (!base || size < 0) |
242 struct cgraph_node *current_node = cgraph_node::get (current_function_decl); | 428 return false; |
243 vec<tree> vargs; | 429 |
244 vec<tree, va_gc> **debug_args = NULL; | 430 if ((offset % BITS_PER_UNIT) != 0) |
245 gcall *new_stmt; | 431 return false; |
246 gimple_stmt_iterator gsi, prev_gsi; | 432 |
247 tree callee_decl; | 433 if (TREE_CODE (base) == MEM_REF) |
248 int i, len; | 434 { |
249 | 435 poly_int64 plmoff = mem_ref_offset (base).force_shwi (); |
250 len = adjustments.length (); | 436 HOST_WIDE_INT moff; |
251 vargs.create (len); | 437 bool is_cst = plmoff.is_constant (&moff); |
252 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl; | 438 if (!is_cst) |
253 current_node->remove_stmt_references (stmt); | 439 return false; |
254 | 440 offset += moff * BITS_PER_UNIT; |
255 gsi = gsi_for_stmt (stmt); | 441 base = TREE_OPERAND (base, 0); |
256 prev_gsi = gsi; | 442 } |
443 | |
444 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX) | |
445 return false; | |
446 | |
447 *base_p = base; | |
448 *unit_offset_p = offset / BITS_PER_UNIT; | |
449 return true; | |
450 } | |
451 | |
452 /* Return true if EXPR describes a transitive split (i.e. one that happened for | |
453 both the caller and the callee) as recorded in PERFORMED_SPLITS. In that | |
454 case, store index of the respective record in PERFORMED_SPLITS into | |
455 *SM_IDX_P and the unit offset from all handled components in EXPR into | |
456 *UNIT_OFFSET_P. */ | |
457 | |
458 static bool | |
459 transitive_split_p (vec<ipa_param_performed_split, va_gc> *performed_splits, | |
460 tree expr, unsigned *sm_idx_p, unsigned *unit_offset_p) | |
461 { | |
462 tree base; | |
463 if (!isra_get_ref_base_and_offset (expr, &base, unit_offset_p)) | |
464 return false; | |
465 | |
466 if (TREE_CODE (base) == SSA_NAME) | |
467 { | |
468 base = SSA_NAME_VAR (base); | |
469 if (!base) | |
470 return false; | |
471 } | |
472 | |
473 unsigned len = vec_safe_length (performed_splits); | |
474 for (unsigned i = 0 ; i < len; i++) | |
475 { | |
476 ipa_param_performed_split *sm = &(*performed_splits)[i]; | |
477 if (sm->dummy_decl == base) | |
478 { | |
479 *sm_idx_p = i; | |
480 return true; | |
481 } | |
482 } | |
483 return false; | |
484 } | |
485 | |
486 /* Structure to hold declarations representing transitive IPA-SRA splits. In | |
487 essence, if we need to pass UNIT_OFFSET of a parameter which originally has | |
488 number BASE_INDEX, we should pass down REPL. */ | |
489 | |
490 struct transitive_split_map | |
491 { | |
492 tree repl; | |
493 unsigned base_index; | |
494 unsigned unit_offset; | |
495 }; | |
496 | |
497 /* If call STMT contains any parameters representing transitive splits as | |
498 described by PERFORMED_SPLITS, return the number of extra parameters that | |
499 were added during clone materialization and fill in INDEX_MAP with adjusted |
500 indices of corresponding original parameters and TRANS_MAP with description | |
501 of all transitive replacement descriptions. Otherwise return zero. */ | |
502 | |
503 static unsigned | |
504 init_transitive_splits (vec<ipa_param_performed_split, va_gc> *performed_splits, | |
505 gcall *stmt, vec <unsigned> *index_map, | |
506 auto_vec <transitive_split_map> *trans_map) | |
507 { | |
508 unsigned phony_arguments = 0; | |
509 unsigned stmt_idx = 0, base_index = 0; | |
510 unsigned nargs = gimple_call_num_args (stmt); | |
511 while (stmt_idx < nargs) | |
512 { | |
513 unsigned unit_offset_delta; | |
514 tree base_arg = gimple_call_arg (stmt, stmt_idx); | |
515 | |
516 if (phony_arguments > 0) | |
517 index_map->safe_push (stmt_idx); | |
518 | |
519 unsigned sm_idx; | |
520 stmt_idx++; | |
521 if (transitive_split_p (performed_splits, base_arg, &sm_idx, | |
522 &unit_offset_delta)) | |
523 { | |
524 if (phony_arguments == 0) | |
525 /* We have optimistically avoided constructing index_map so far but |
526 now it is clear it will be necessary, so let's create the easy | |
527 bit we skipped until now. */ | |
528 for (unsigned k = 0; k < stmt_idx; k++) | |
529 index_map->safe_push (k); | |
530 | |
531 tree dummy = (*performed_splits)[sm_idx].dummy_decl; | |
532 for (unsigned j = sm_idx; j < performed_splits->length (); j++) | |
533 { | |
534 ipa_param_performed_split *caller_split | |
535 = &(*performed_splits)[j]; | |
536 if (caller_split->dummy_decl != dummy) | |
537 break; | |
538 | |
539 tree arg = gimple_call_arg (stmt, stmt_idx); | |
540 struct transitive_split_map tsm; | |
541 tsm.repl = arg; | |
542 tsm.base_index = base_index; | |
543 if (caller_split->unit_offset >= unit_offset_delta) | |
544 { | |
545 tsm.unit_offset | |
546 = (caller_split->unit_offset - unit_offset_delta); | |
547 trans_map->safe_push (tsm); | |
548 } | |
549 | |
550 phony_arguments++; | |
551 stmt_idx++; | |
552 } | |
553 } | |
554 base_index++; | |
555 } | |
556 return phony_arguments; | |
557 } | |
558 | |
559 /* Modify actual arguments of a function call in statement STMT, assuming it | |
560 calls CALLEE_DECL. CALLER_ADJ must be the description of parameter | |
561 adjustments of the caller or NULL if there are none. Return the new | |
562 statement that replaced the old one. When invoked, cfun and | |
563 current_function_decl have to be set to the caller. */ | |
564 | |
565 gcall * | |
566 ipa_param_adjustments::modify_call (gcall *stmt, | |
567 vec<ipa_param_performed_split, | |
568 va_gc> *performed_splits, | |
569 tree callee_decl, bool update_references) | |
570 { | |
571 unsigned len = vec_safe_length (m_adj_params); | |
572 auto_vec<tree, 16> vargs (len); | |
573 tree old_decl = gimple_call_fndecl (stmt); | |
574 unsigned old_nargs = gimple_call_num_args (stmt); | |
575 auto_vec<bool, 16> kept (old_nargs); | |
576 kept.quick_grow_cleared (old_nargs); | |
577 | |
578 auto_vec <unsigned, 16> index_map; | |
579 auto_vec <transitive_split_map> trans_map; | |
580 bool transitive_remapping = false; | |
581 | |
582 if (performed_splits) | |
583 { | |
584 unsigned removed = init_transitive_splits (performed_splits, | |
585 stmt, &index_map, &trans_map); | |
586 if (removed > 0) | |
587 { | |
588 transitive_remapping = true; | |
589 old_nargs -= removed; | |
590 } | |
591 } | |
592 | |
593 cgraph_node *current_node = cgraph_node::get (current_function_decl); | |
594 if (update_references) | |
595 current_node->remove_stmt_references (stmt); | |
596 | |
597 gimple_stmt_iterator gsi = gsi_for_stmt (stmt); | |
598 gimple_stmt_iterator prev_gsi = gsi; | |
257 gsi_prev (&prev_gsi); | 599 gsi_prev (&prev_gsi); |
258 for (i = 0; i < len; i++) | 600 for (unsigned i = 0; i < len; i++) |
259 { | 601 { |
260 struct ipa_parm_adjustment *adj; | 602 ipa_adjusted_param *apm = &(*m_adj_params)[i]; |
261 | 603 if (apm->op == IPA_PARAM_OP_COPY) |
262 adj = &adjustments[i]; | 604 { |
263 | 605 unsigned index = apm->base_index; |
264 if (adj->op == IPA_PARM_OP_COPY) | 606 if (index >= old_nargs) |
265 { | 607 /* Can happen if the original call has argument mismatch, |
266 tree arg = gimple_call_arg (stmt, adj->base_index); | 608 ignore. */ |
609 continue; | |
610 if (transitive_remapping) | |
611 index = index_map[apm->base_index]; | |
612 | |
613 tree arg = gimple_call_arg (stmt, index); | |
267 | 614 |
268 vargs.quick_push (arg); | 615 vargs.quick_push (arg); |
269 } | 616 kept[index] = true; |
270 else if (adj->op != IPA_PARM_OP_REMOVE) | 617 continue; |
271 { | 618 } |
272 tree expr, base, off; | 619 |
273 location_t loc; | 620 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself. |
274 unsigned int deref_align = 0; | 621 If we ever want to support it during WPA IPA stage, we'll need a |
275 bool deref_base = false; | 622 mechanism to call into the IPA passes that introduced them. Currently |
276 | 623 we simply mandate that IPA infrastructure understands all argument |
277 /* We create a new parameter out of the value of the old one, we can | 624 modifications. Remember, edge redirection/modification is done only |
278 do the following kind of transformations: | 625 once, not in steps for each pass modifying the callee like clone |
279 | 626 materialization. */ |
280 - A scalar passed by reference is converted to a scalar passed by | 627 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT); |
281 value. (adj->by_ref is false and the type of the original | 628 |
282 actual argument is a pointer to a scalar). | 629 /* We have to handle transitive changes differently using the maps we |
283 | 630 have created before. So look into them first. */ |
284 - A part of an aggregate is passed instead of the whole aggregate. | 631 tree repl = NULL_TREE; |
285 The part can be passed either by value or by reference, this is | 632 for (unsigned j = 0; j < trans_map.length (); j++) |
286 determined by value of adj->by_ref. Moreover, the code below | 633 if (trans_map[j].base_index == apm->base_index |
287 handles both situations when the original aggregate is passed by | 634 && trans_map[j].unit_offset == apm->unit_offset) |
288 value (its type is not a pointer) and when it is passed by | 635 { |
289 reference (it is a pointer to an aggregate). | 636 repl = trans_map[j].repl; |
290 | 637 break; |
291 When the new argument is passed by reference (adj->by_ref is true) | 638 } |
292 it must be a part of an aggregate and therefore we form it by | 639 if (repl) |
293 simply taking the address of a reference inside the original | 640 { |
294 aggregate. */ | 641 vargs.quick_push (repl); |
295 | 642 continue; |
296 poly_int64 byte_offset = exact_div (adj->offset, BITS_PER_UNIT); | 643 } |
297 base = gimple_call_arg (stmt, adj->base_index); | 644 |
298 loc = gimple_location (stmt); | 645 unsigned index = apm->base_index; |
299 | 646 if (index >= old_nargs) |
300 if (TREE_CODE (base) != ADDR_EXPR | 647 /* Can happen if the original call has argument mismatch, ignore. */ |
301 && POINTER_TYPE_P (TREE_TYPE (base))) | 648 continue; |
302 off = build_int_cst (adj->alias_ptr_type, byte_offset); | 649 if (transitive_remapping) |
650 index = index_map[apm->base_index]; | |
651 tree base = gimple_call_arg (stmt, index); | |
652 | |
653 /* We create a new parameter out of the value of the old one, we can | |
654 do the following kind of transformations: | |
655 | |
656 - A scalar passed by reference, potentially as a part of a larger | |
657 aggregate, is converted to a scalar passed by value. | |
658 | |
659 - A part of an aggregate is passed instead of the whole aggregate. */ | |
660 | |
661 location_t loc = gimple_location (stmt); | |
662 tree off; | |
663 bool deref_base = false; | |
664 unsigned int deref_align = 0; | |
665 if (TREE_CODE (base) != ADDR_EXPR | |
666 && is_gimple_reg_type (TREE_TYPE (base))) | |
667 { | |
668 /* Detect type mismatches in calls in invalid programs and make a | |
669 poor attempt to gracefully convert them so that we don't ICE. */ | |
670 if (!POINTER_TYPE_P (TREE_TYPE (base))) | |
671 base = force_value_to_type (ptr_type_node, base); | |
672 | |
673 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset); | |
674 } | |
675 else | |
676 { | |
677 bool addrof; | |
678 if (TREE_CODE (base) == ADDR_EXPR) | |
679 { | |
680 base = TREE_OPERAND (base, 0); | |
681 addrof = true; | |
682 } | |
303 else | 683 else |
304 { | 684 addrof = false; |
305 poly_int64 base_offset; | 685 |
306 tree prev_base; | 686 tree prev_base = base; |
307 bool addrof; | 687 poly_int64 base_offset; |
308 | 688 base = get_addr_base_and_unit_offset (base, &base_offset); |
309 if (TREE_CODE (base) == ADDR_EXPR) | 689 |
690 /* Aggregate arguments can have non-invariant addresses. */ | |
691 if (!base) | |
692 { | |
693 base = build_fold_addr_expr (prev_base); | |
694 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset); | |
695 } | |
696 else if (TREE_CODE (base) == MEM_REF) | |
697 { | |
698 if (!addrof) | |
310 { | 699 { |
311 base = TREE_OPERAND (base, 0); | 700 deref_base = true; |
312 addrof = true; | 701 deref_align = TYPE_ALIGN (TREE_TYPE (base)); |
313 } | 702 } |
314 else | 703 off = build_int_cst (apm->alias_ptr_type, |
315 addrof = false; | 704 base_offset + apm->unit_offset); |
316 prev_base = base; | 705 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), |
317 base = get_addr_base_and_unit_offset (base, &base_offset); | 706 off); |
318 /* Aggregate arguments can have non-invariant addresses. */ | 707 base = TREE_OPERAND (base, 0); |
319 if (!base) | |
320 { | |
321 base = build_fold_addr_expr (prev_base); | |
322 off = build_int_cst (adj->alias_ptr_type, byte_offset); | |
323 } | |
324 else if (TREE_CODE (base) == MEM_REF) | |
325 { | |
326 if (!addrof) | |
327 { | |
328 deref_base = true; | |
329 deref_align = TYPE_ALIGN (TREE_TYPE (base)); | |
330 } | |
331 off = build_int_cst (adj->alias_ptr_type, | |
332 base_offset + byte_offset); | |
333 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), | |
334 off); | |
335 base = TREE_OPERAND (base, 0); | |
336 } | |
337 else | |
338 { | |
339 off = build_int_cst (adj->alias_ptr_type, | |
340 base_offset + byte_offset); | |
341 base = build_fold_addr_expr (base); | |
342 } | |
343 } | |
344 | |
345 if (!adj->by_ref) | |
346 { | |
347 tree type = adj->type; | |
348 unsigned int align; | |
349 unsigned HOST_WIDE_INT misalign; | |
350 | |
351 if (deref_base) | |
352 { | |
353 align = deref_align; | |
354 misalign = 0; | |
355 } | |
356 else | |
357 { | |
358 get_pointer_alignment_1 (base, &align, &misalign); | |
359 if (TYPE_ALIGN (type) > align) | |
360 align = TYPE_ALIGN (type); | |
361 } | |
362 misalign += (offset_int::from (wi::to_wide (off), | |
363 SIGNED).to_short_addr () | |
364 * BITS_PER_UNIT); | |
365 misalign = misalign & (align - 1); | |
366 if (misalign != 0) | |
367 align = least_bit_hwi (misalign); | |
368 if (align < TYPE_ALIGN (type)) | |
369 type = build_aligned_type (type, align); | |
370 base = force_gimple_operand_gsi (&gsi, base, | |
371 true, NULL, true, GSI_SAME_STMT); | |
372 expr = fold_build2_loc (loc, MEM_REF, type, base, off); | |
373 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse; | |
374 /* If expr is not a valid gimple call argument emit | |
375 a load into a temporary. */ | |
376 if (is_gimple_reg_type (TREE_TYPE (expr))) | |
377 { | |
378 gimple *tem = gimple_build_assign (NULL_TREE, expr); | |
379 if (gimple_in_ssa_p (cfun)) | |
380 { | |
381 gimple_set_vuse (tem, gimple_vuse (stmt)); | |
382 expr = make_ssa_name (TREE_TYPE (expr), tem); | |
383 } | |
384 else | |
385 expr = create_tmp_reg (TREE_TYPE (expr)); | |
386 gimple_assign_set_lhs (tem, expr); | |
387 gimple_set_location (tem, loc); | |
388 gsi_insert_before (&gsi, tem, GSI_SAME_STMT); | |
389 } | |
390 } | 708 } |
391 else | 709 else |
392 { | 710 { |
393 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off); | 711 off = build_int_cst (apm->alias_ptr_type, |
394 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse; | 712 base_offset + apm->unit_offset); |
395 expr = build_fold_addr_expr (expr); | 713 base = build_fold_addr_expr (base); |
396 expr = force_gimple_operand_gsi (&gsi, expr, | 714 } |
397 true, NULL, true, GSI_SAME_STMT); | 715 } |
398 } | 716 |
399 vargs.quick_push (expr); | 717 tree type = apm->type; |
400 } | 718 unsigned int align; |
401 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_BIND_STMTS) | 719 unsigned HOST_WIDE_INT misalign; |
402 { | 720 |
403 unsigned int ix; | 721 if (deref_base) |
404 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg; | 722 { |
405 gimple *def_temp; | 723 align = deref_align; |
406 | 724 misalign = 0; |
407 arg = gimple_call_arg (stmt, adj->base_index); | 725 } |
726 else | |
727 { | |
728 get_pointer_alignment_1 (base, &align, &misalign); | |
729 /* All users must make sure that we can be optimistic when it | |
730 comes to alignment in this case (by inspecting the final users | |
731 of these new parameters). */ | |
732 if (TYPE_ALIGN (type) > align) | |
733 align = TYPE_ALIGN (type); | |
734 } | |
735 misalign | |
736 += (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr () | |
737 * BITS_PER_UNIT); | |
738 misalign = misalign & (align - 1); | |
739 if (misalign != 0) | |
740 align = least_bit_hwi (misalign); | |
741 if (align < TYPE_ALIGN (type)) | |
742 type = build_aligned_type (type, align); | |
743 base = force_gimple_operand_gsi (&gsi, base, | |
744 true, NULL, true, GSI_SAME_STMT); | |
745 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off); | |
746 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse; | |
747 /* If expr is not a valid gimple call argument emit | |
748 a load into a temporary. */ | |
749 if (is_gimple_reg_type (TREE_TYPE (expr))) | |
750 { | |
751 gimple *tem = gimple_build_assign (NULL_TREE, expr); | |
752 if (gimple_in_ssa_p (cfun)) | |
753 { | |
754 gimple_set_vuse (tem, gimple_vuse (stmt)); | |
755 expr = make_ssa_name (TREE_TYPE (expr), tem); | |
756 } | |
757 else | |
758 expr = create_tmp_reg (TREE_TYPE (expr)); | |
759 gimple_assign_set_lhs (tem, expr); | |
760 gsi_insert_before (&gsi, tem, GSI_SAME_STMT); | |
761 } | |
762 vargs.quick_push (expr); | |
763 } | |
764 | |
765 if (m_always_copy_start >= 0) | |
766 for (unsigned i = m_always_copy_start; i < old_nargs; i++) | |
767 vargs.safe_push (gimple_call_arg (stmt, i)); | |
768 | |
769 /* For optimized away parameters, add on the caller side | |
770 before the call | |
771 DEBUG D#X => parm_Y(D) | |
772 stmts and associate D#X with parm in decl_debug_args_lookup | |
773 vector to say for debug info that if parameter parm had been passed, | |
774 it would have value parm_Y(D). */ | |
775 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl) | |
776 { | |
777 vec<tree, va_gc> **debug_args = NULL; | |
778 unsigned i = 0; | |
779 for (tree old_parm = DECL_ARGUMENTS (old_decl); | |
780 old_parm && i < old_nargs && ((int) i) < m_always_copy_start; | |
781 old_parm = DECL_CHAIN (old_parm), i++) | |
782 { | |
783 if (!is_gimple_reg (old_parm) || kept[i]) | |
784 continue; | |
785 tree origin = DECL_ORIGIN (old_parm); | |
786 tree arg = gimple_call_arg (stmt, i); | |
787 | |
408 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg))) | 788 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg))) |
409 { | 789 { |
410 if (!fold_convertible_p (TREE_TYPE (origin), arg)) | 790 if (!fold_convertible_p (TREE_TYPE (origin), arg)) |
411 continue; | 791 continue; |
412 arg = fold_convert_loc (gimple_location (stmt), | 792 tree rhs1; |
413 TREE_TYPE (origin), arg); | 793 if (TREE_CODE (arg) == SSA_NAME |
794 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)) | |
795 && (rhs1 | |
796 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg))) | |
797 && useless_type_conversion_p (TREE_TYPE (origin), | |
798 TREE_TYPE (rhs1))) | |
799 arg = rhs1; | |
800 else | |
801 arg = fold_convert_loc (gimple_location (stmt), | |
802 TREE_TYPE (origin), arg); | |
414 } | 803 } |
415 if (debug_args == NULL) | 804 if (debug_args == NULL) |
416 debug_args = decl_debug_args_insert (callee_decl); | 805 debug_args = decl_debug_args_insert (callee_decl); |
806 unsigned int ix; | |
807 tree ddecl = NULL_TREE; | |
417 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2) | 808 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2) |
418 if (ddecl == origin) | 809 if (ddecl == origin) |
419 { | 810 { |
420 ddecl = (**debug_args)[ix + 1]; | 811 ddecl = (**debug_args)[ix + 1]; |
421 break; | 812 break; |
428 SET_DECL_MODE (ddecl, DECL_MODE (origin)); | 819 SET_DECL_MODE (ddecl, DECL_MODE (origin)); |
429 | 820 |
430 vec_safe_push (*debug_args, origin); | 821 vec_safe_push (*debug_args, origin); |
431 vec_safe_push (*debug_args, ddecl); | 822 vec_safe_push (*debug_args, ddecl); |
432 } | 823 } |
433 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt); | 824 gimple *def_temp = gimple_build_debug_bind (ddecl, |
825 unshare_expr (arg), stmt); | |
434 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT); | 826 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT); |
435 } | 827 } |
436 } | 828 } |
437 | 829 |
438 if (dump_file && (dump_flags & TDF_DETAILS)) | 830 if (dump_file && (dump_flags & TDF_DETAILS)) |
439 { | 831 { |
440 fprintf (dump_file, "replacing stmt:"); | 832 fprintf (dump_file, "replacing stmt:"); |
441 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0); | 833 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0); |
442 } | 834 } |
443 | 835 |
444 new_stmt = gimple_build_call_vec (callee_decl, vargs); | 836 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs); |
445 vargs.release (); | 837 |
446 if (gimple_call_lhs (stmt)) | 838 if (tree lhs = gimple_call_lhs (stmt)) |
447 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); | 839 { |
840 if (!m_skip_return) | |
841 gimple_call_set_lhs (new_stmt, lhs); | |
842 else if (TREE_CODE (lhs) == SSA_NAME) | |
843 { | |
844 /* LHS should now by a default-def SSA. Unfortunately default-def | |
845 SSA_NAMEs need a backing variable (or at least some code examining | |
846 SSAs assumes it is non-NULL). So we either have to re-use the | |
847 decl we have at hand or introdice a new one. */ | |
848 tree repl = create_tmp_var (TREE_TYPE (lhs), "removed_return"); | |
849 repl = get_or_create_ssa_default_def (cfun, repl); | |
850 SSA_NAME_IS_DEFAULT_DEF (repl) = true; | |
851 imm_use_iterator ui; | |
852 use_operand_p use_p; | |
853 gimple *using_stmt; | |
854 FOR_EACH_IMM_USE_STMT (using_stmt, ui, lhs) | |
855 { | |
856 FOR_EACH_IMM_USE_ON_STMT (use_p, ui) | |
857 { | |
858 SET_USE (use_p, repl); | |
859 } | |
860 update_stmt (using_stmt); | |
861 } | |
862 } | |
863 } | |
448 | 864 |
449 gimple_set_block (new_stmt, gimple_block (stmt)); | 865 gimple_set_block (new_stmt, gimple_block (stmt)); |
450 if (gimple_has_location (stmt)) | 866 if (gimple_has_location (stmt)) |
451 gimple_set_location (new_stmt, gimple_location (stmt)); | 867 gimple_set_location (new_stmt, gimple_location (stmt)); |
452 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt)); | 868 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt)); |
453 gimple_call_copy_flags (new_stmt, stmt); | 869 gimple_call_copy_flags (new_stmt, stmt); |
454 if (gimple_in_ssa_p (cfun)) | 870 if (gimple_in_ssa_p (cfun)) |
455 { | 871 gimple_move_vops (new_stmt, stmt); |
456 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); | |
457 if (gimple_vdef (stmt)) | |
458 { | |
459 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
460 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt; | |
461 } | |
462 } | |
463 | 872 |
464 if (dump_file && (dump_flags & TDF_DETAILS)) | 873 if (dump_file && (dump_flags & TDF_DETAILS)) |
465 { | 874 { |
466 fprintf (dump_file, "with stmt:"); | 875 fprintf (dump_file, "with stmt:"); |
467 print_gimple_stmt (dump_file, new_stmt, 0); | 876 print_gimple_stmt (dump_file, new_stmt, 0); |
468 fprintf (dump_file, "\n"); | 877 fprintf (dump_file, "\n"); |
469 } | 878 } |
470 gsi_replace (&gsi, new_stmt, true); | 879 gsi_replace (&gsi, new_stmt, true); |
471 if (cs) | 880 if (update_references) |
472 cs->set_call_stmt (new_stmt); | 881 do |
473 do | 882 { |
474 { | 883 current_node->record_stmt_references (gsi_stmt (gsi)); |
475 current_node->record_stmt_references (gsi_stmt (gsi)); | 884 gsi_prev (&gsi); |
476 gsi_prev (&gsi); | 885 } |
477 } | 886 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi)); |
478 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi)); | 887 return new_stmt; |
479 } | 888 } |
480 | 889 |
481 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */ | 890 /* Dump information contained in the object in textual form to F. */ |
482 | 891 |
483 static bool | 892 void |
484 index_in_adjustments_multiple_times_p (int base_index, | 893 ipa_param_adjustments::dump (FILE *f) |
485 ipa_parm_adjustment_vec adjustments) | 894 { |
486 { | 895 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start); |
487 int i, len = adjustments.length (); | 896 ipa_dump_adjusted_parameters (f, m_adj_params); |
488 bool one = false; | 897 if (m_skip_return) |
489 | 898 fprintf (f, " Will SKIP return.\n"); |
490 for (i = 0; i < len; i++) | 899 } |
491 { | 900 |
492 struct ipa_parm_adjustment *adj; | 901 /* Dump information contained in the object in textual form to stderr. */ |
493 adj = &adjustments[i]; | 902 |
494 | 903 void |
495 if (adj->base_index == base_index) | 904 ipa_param_adjustments::debug () |
496 { | 905 { |
497 if (one) | 906 dump (stderr); |
498 return true; | 907 } |
499 else | 908 |
500 one = true; | 909 /* Register that REPLACEMENT should replace parameter described in APM and |
501 } | 910 optionally as DUMMY to mark transitive splits across calls. */ |
502 } | 911 |
503 return false; | 912 void |
504 } | 913 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm, |
505 | 914 tree replacement, |
506 /* Return adjustments that should have the same effect on function parameters | 915 tree dummy) |
507 and call arguments as if they were first changed according to adjustments in | 916 { |
508 INNER and then by adjustments in OUTER. */ | 917 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT |
509 | 918 || apm->op == IPA_PARAM_OP_NEW); |
510 ipa_parm_adjustment_vec | 919 gcc_checking_assert (!apm->prev_clone_adjustment); |
511 ipa_combine_adjustments (ipa_parm_adjustment_vec inner, | 920 ipa_param_body_replacement psr; |
512 ipa_parm_adjustment_vec outer) | 921 psr.base = m_oparms[apm->prev_clone_index]; |
513 { | 922 psr.repl = replacement; |
514 int i, outlen = outer.length (); | 923 psr.dummy = dummy; |
515 int inlen = inner.length (); | 924 psr.unit_offset = apm->unit_offset; |
516 int removals = 0; | 925 m_replacements.safe_push (psr); |
517 ipa_parm_adjustment_vec adjustments, tmp; | 926 } |
518 | 927 |
519 tmp.create (inlen); | 928 /* Copy or not, as appropriate given m_id and decl context, a pre-existing |
520 for (i = 0; i < inlen; i++) | 929 PARM_DECL T so that it can be included in the parameters of the modified |
521 { | 930 function. */ |
522 struct ipa_parm_adjustment *n; | 931 |
523 n = &inner[i]; | 932 tree |
524 | 933 ipa_param_body_adjustments::carry_over_param (tree t) |
525 if (n->op == IPA_PARM_OP_REMOVE) | 934 { |
526 removals++; | 935 tree new_parm; |
936 if (m_id) | |
937 { | |
938 new_parm = remap_decl (t, m_id); | |
939 if (TREE_CODE (new_parm) != PARM_DECL) | |
940 new_parm = m_id->copy_decl (t, m_id); | |
941 } | |
942 else if (DECL_CONTEXT (t) != m_fndecl) | |
943 { | |
944 new_parm = copy_node (t); | |
945 DECL_CONTEXT (new_parm) = m_fndecl; | |
946 } | |
947 else | |
948 new_parm = t; | |
949 return new_parm; | |
950 } | |
951 | |
952 /* Common initialization performed by all ipa_param_body_adjustments | |
953 constructors. OLD_FNDECL is the declaration we take original arguments | |
954 from, (it may be the same as M_FNDECL). VARS, if non-NULL, is a pointer to | |
955 a chained list of new local variables. TREE_MAP is the IPA-CP produced | |
956 mapping of trees to constants. | |
957 | |
958 The function is rather long but it really onlu initializes all data members | |
959 of the class. It creates new param DECLs, finds their new types, */ | |
960 | |
961 void | |
962 ipa_param_body_adjustments::common_initialization (tree old_fndecl, | |
963 tree *vars, | |
964 vec<ipa_replace_map *, | |
965 va_gc> *tree_map) | |
966 { | |
967 push_function_arg_decls (&m_oparms, old_fndecl); | |
968 auto_vec<tree,16> otypes; | |
969 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE) | |
970 push_function_arg_types (&otypes, TREE_TYPE (old_fndecl)); | |
971 else | |
972 { | |
973 auto_vec<tree,16> oparms; | |
974 push_function_arg_decls (&oparms, old_fndecl); | |
975 unsigned ocount = oparms.length (); | |
976 otypes.reserve_exact (ocount); | |
977 for (unsigned i = 0; i < ocount; i++) | |
978 otypes.quick_push (TREE_TYPE (oparms[i])); | |
979 } | |
980 fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true); | |
981 | |
982 auto_vec<bool, 16> kept; | |
983 kept.reserve_exact (m_oparms.length ()); | |
984 kept.quick_grow_cleared (m_oparms.length ()); | |
985 auto_vec<tree, 16> isra_dummy_decls; | |
986 isra_dummy_decls.reserve_exact (m_oparms.length ()); | |
987 isra_dummy_decls.quick_grow_cleared (m_oparms.length ()); | |
988 | |
989 unsigned adj_len = vec_safe_length (m_adj_params); | |
990 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE) | |
991 && (adj_len == 0 | |
992 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY | |
993 || (*m_adj_params)[0].base_index != 0)); | |
994 | |
995 /* The main job of the this function is to go over the vector of adjusted | |
996 parameters and create declarations or find corresponding old ones and push | |
997 them to m_new_decls. For IPA-SRA replacements it also creates | |
998 corresponding m_id->dst_node->clone.performed_splits entries. */ | |
999 | |
1000 m_new_decls.reserve_exact (adj_len); | |
1001 for (unsigned i = 0; i < adj_len ; i++) | |
1002 { | |
1003 ipa_adjusted_param *apm = &(*m_adj_params)[i]; | |
1004 unsigned prev_index = apm->prev_clone_index; | |
1005 tree new_parm; | |
1006 if (apm->op == IPA_PARAM_OP_COPY | |
1007 || apm->prev_clone_adjustment) | |
1008 { | |
1009 kept[prev_index] = true; | |
1010 new_parm = carry_over_param (m_oparms[prev_index]); | |
1011 m_new_decls.quick_push (new_parm); | |
1012 } | |
1013 else if (apm->op == IPA_PARAM_OP_NEW | |
1014 || apm->op == IPA_PARAM_OP_SPLIT) | |
1015 { | |
1016 tree new_type = m_new_types[i]; | |
1017 gcc_checking_assert (new_type); | |
1018 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE, | |
1019 new_type); | |
1020 const char *prefix = ipa_param_prefixes[apm->param_prefix_index]; | |
1021 DECL_NAME (new_parm) = create_tmp_var_name (prefix); | |
1022 DECL_ARTIFICIAL (new_parm) = 1; | |
1023 DECL_ARG_TYPE (new_parm) = new_type; | |
1024 DECL_CONTEXT (new_parm) = m_fndecl; | |
1025 TREE_USED (new_parm) = 1; | |
1026 DECL_IGNORED_P (new_parm) = 1; | |
1027 /* We assume all newly created arguments are not addressable. */ | |
1028 if (TREE_CODE (new_type) == COMPLEX_TYPE | |
1029 || TREE_CODE (new_type) == VECTOR_TYPE) | |
1030 DECL_GIMPLE_REG_P (new_parm) = 1; | |
1031 layout_decl (new_parm, 0); | |
1032 m_new_decls.quick_push (new_parm); | |
1033 | |
1034 if (apm->op == IPA_PARAM_OP_SPLIT) | |
1035 { | |
1036 m_split_modifications_p = true; | |
1037 | |
1038 if (m_id) | |
1039 { | |
1040 tree dummy_decl; | |
1041 if (!isra_dummy_decls[prev_index]) | |
1042 { | |
1043 dummy_decl = copy_decl_to_var (m_oparms[prev_index], | |
1044 m_id); | |
1045 /* Any attempt to remap this dummy in this particular | |
1046 instance of clone materialization should yield | |
1047 itself. */ | |
1048 insert_decl_map (m_id, dummy_decl, dummy_decl); | |
1049 | |
1050 DECL_CHAIN (dummy_decl) = *vars; | |
1051 *vars = dummy_decl; | |
1052 isra_dummy_decls[prev_index] = dummy_decl; | |
1053 } | |
1054 else | |
1055 dummy_decl = isra_dummy_decls[prev_index]; | |
1056 | |
1057 register_replacement (apm, new_parm, dummy_decl); | |
1058 ipa_param_performed_split ps; | |
1059 ps.dummy_decl = dummy_decl; | |
1060 ps.unit_offset = apm->unit_offset; | |
1061 vec_safe_push (m_id->dst_node->clone.performed_splits, ps); | |
1062 } | |
1063 else | |
1064 register_replacement (apm, new_parm); | |
1065 } | |
1066 } | |
527 else | 1067 else |
528 { | 1068 gcc_unreachable (); |
529 /* FIXME: Handling of new arguments are not implemented yet. */ | 1069 } |
530 gcc_assert (n->op != IPA_PARM_OP_NEW); | 1070 |
531 tmp.quick_push (*n); | 1071 |
532 } | 1072 /* As part of body modifications, we will also have to replace remaining uses |
533 } | 1073 of remaining uses of removed PARM_DECLs (which do not however use the |
534 | 1074 initial value) with their VAR_DECL copies. |
535 adjustments.create (outlen + removals); | 1075 |
536 for (i = 0; i < outlen; i++) | 1076 We do this differently with and without m_id. With m_id, we rely on its |
537 { | 1077 mapping and create a replacement straight away. Without it, we have our |
538 struct ipa_parm_adjustment r; | 1078 own mechanism for which we have to populate m_removed_decls vector. Just |
539 struct ipa_parm_adjustment *out = &outer[i]; | 1079 don't mix them, that is why you should not call |
540 struct ipa_parm_adjustment *in = &tmp[out->base_index]; | 1080 replace_removed_params_ssa_names or perform_cfun_body_modifications when |
541 | 1081 you construct with ID not equal to NULL. */ |
542 memset (&r, 0, sizeof (r)); | 1082 |
543 gcc_assert (in->op != IPA_PARM_OP_REMOVE); | 1083 unsigned op_len = m_oparms.length (); |
544 if (out->op == IPA_PARM_OP_REMOVE) | 1084 for (unsigned i = 0; i < op_len; i++) |
545 { | 1085 if (!kept[i]) |
546 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp)) | 1086 { |
547 { | 1087 if (m_id) |
548 r.op = IPA_PARM_OP_REMOVE; | 1088 { |
549 adjustments.quick_push (r); | 1089 if (!m_id->decl_map->get (m_oparms[i])) |
550 } | 1090 { |
551 continue; | 1091 /* TODO: Perhaps at least aggregate-type params could re-use |
552 } | 1092 their isra_dummy_decl here? */ |
553 else | 1093 tree var = copy_decl_to_var (m_oparms[i], m_id); |
554 { | 1094 insert_decl_map (m_id, m_oparms[i], var); |
555 /* FIXME: Handling of new arguments are not implemented yet. */ | 1095 /* Declare this new variable. */ |
556 gcc_assert (out->op != IPA_PARM_OP_NEW); | 1096 DECL_CHAIN (var) = *vars; |
557 } | 1097 *vars = var; |
558 | 1098 } |
559 r.base_index = in->base_index; | 1099 } |
560 r.type = out->type; | 1100 else |
561 | 1101 { |
562 /* FIXME: Create nonlocal value too. */ | 1102 m_removed_decls.safe_push (m_oparms[i]); |
563 | 1103 m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1); |
564 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY) | 1104 } |
565 r.op = IPA_PARM_OP_COPY; | 1105 } |
566 else if (in->op == IPA_PARM_OP_COPY) | 1106 |
567 r.offset = out->offset; | 1107 if (!MAY_HAVE_DEBUG_STMTS) |
568 else if (out->op == IPA_PARM_OP_COPY) | 1108 return; |
569 r.offset = in->offset; | 1109 |
570 else | 1110 /* Finally, when generating debug info, we fill vector m_reset_debug_decls |
571 r.offset = in->offset + out->offset; | 1111 with removed parameters declarations. We do this in order to re-map their |
572 adjustments.quick_push (r); | 1112 debug bind statements and create debug decls for them. */ |
573 } | 1113 |
574 | 1114 if (tree_map) |
575 for (i = 0; i < inlen; i++) | 1115 { |
576 { | 1116 /* Do not output debuginfo for parameter declarations as if they vanished |
577 struct ipa_parm_adjustment *n = &inner[i]; | 1117 when they were in fact replaced by a constant. */ |
578 | 1118 auto_vec <int, 16> index_mapping; |
579 if (n->op == IPA_PARM_OP_REMOVE) | 1119 bool need_remap = false; |
580 adjustments.quick_push (*n); | 1120 |
581 } | 1121 if (m_id && m_id->src_node->clone.param_adjustments) |
582 | 1122 { |
583 tmp.release (); | 1123 ipa_param_adjustments *prev_adjustments |
584 return adjustments; | 1124 = m_id->src_node->clone.param_adjustments; |
1125 prev_adjustments->get_updated_indices (&index_mapping); | |
1126 need_remap = true; | |
1127 } | |
1128 | |
1129 for (unsigned i = 0; i < tree_map->length (); i++) | |
1130 { | |
1131 int parm_num = (*tree_map)[i]->parm_num; | |
1132 gcc_assert (parm_num >= 0); | |
1133 if (need_remap) | |
1134 parm_num = index_mapping[parm_num]; | |
1135 kept[parm_num] = true; | |
1136 } | |
1137 } | |
1138 | |
1139 for (unsigned i = 0; i < op_len; i++) | |
1140 if (!kept[i] && is_gimple_reg (m_oparms[i])) | |
1141 m_reset_debug_decls.safe_push (m_oparms[i]); | |
1142 } | |
1143 | |
1144 /* Constructor of ipa_param_body_adjustments from a simple list of | |
1145 modifications to parameters listed in ADJ_PARAMS which will prepare ground | |
1146 for modification of parameters of fndecl. Return value of the function will | |
1147 not be removed and the object will assume it does not run as a part of | |
1148 tree-function_versioning. */ | |
1149 | |
1150 ipa_param_body_adjustments | |
1151 ::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params, | |
1152 tree fndecl) | |
1153 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (), | |
1154 m_split_modifications_p (false), m_fndecl (fndecl), m_id (NULL), | |
1155 m_oparms (), m_new_decls (), m_new_types (), m_replacements (), | |
1156 m_removed_decls (), m_removed_map (), m_method2func (false) | |
1157 { | |
1158 common_initialization (fndecl, NULL, NULL); | |
1159 } | |
1160 | |
1161 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in | |
1162 ADJUSTMENTS which will prepare ground for modification of parameters of | |
1163 fndecl. The object will assume it does not run as a part of | |
1164 tree-function_versioning. */ | |
1165 | |
1166 ipa_param_body_adjustments | |
1167 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments, | |
1168 tree fndecl) | |
1169 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments), | |
1170 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl), | |
1171 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (), | |
1172 m_replacements (), m_removed_decls (), m_removed_map (), | |
1173 m_method2func (false) | |
1174 { | |
1175 common_initialization (fndecl, NULL, NULL); | |
1176 } | |
1177 | |
1178 /* Constructor of ipa_param_body_adjustments which sets it up as a part of | |
1179 running tree_function_versioning. Planned modifications to the function are | |
1180 in ADJUSTMENTS. FNDECL designates the new function clone which is being | |
1181 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which | |
1182 at the time of invocation still share DECL_ARGUMENTS). ID is the | |
1183 copy_body_data structure driving the wholy body copying process. VARS is a | |
1184 pointer to the head of the list of new local variables, TREE_MAP is the map | |
1185 that drives tree substitution in the cloning process. */ | |
1186 | |
1187 ipa_param_body_adjustments | |
1188 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments, | |
1189 tree fndecl, tree old_fndecl, | |
1190 copy_body_data *id, tree *vars, | |
1191 vec<ipa_replace_map *, va_gc> *tree_map) | |
1192 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments), | |
1193 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl), | |
1194 m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (), | |
1195 m_removed_decls (), m_removed_map (), m_method2func (false) | |
1196 { | |
1197 common_initialization (old_fndecl, vars, tree_map); | |
1198 } | |
1199 | |
1200 /* Chain new param decls up and return them. */ | |
1201 | |
1202 tree | |
1203 ipa_param_body_adjustments::get_new_param_chain () | |
1204 { | |
1205 tree result; | |
1206 tree *link = &result; | |
1207 | |
1208 unsigned len = vec_safe_length (m_adj_params); | |
1209 for (unsigned i = 0; i < len; i++) | |
1210 { | |
1211 tree new_decl = m_new_decls[i]; | |
1212 *link = new_decl; | |
1213 link = &DECL_CHAIN (new_decl); | |
1214 } | |
1215 *link = NULL_TREE; | |
1216 return result; | |
1217 } | |
1218 | |
1219 /* Modify the function parameters FNDECL and its type according to the plan in | |
1220 ADJUSTMENTS. This function needs to be called when the decl has not already | |
1221 been processed with ipa_param_adjustments::adjust_decl, otherwise just | |
1222 seting DECL_ARGUMENTS to whatever get_new_param_chain will do is enough. */ | |
1223 | |
1224 void | |
1225 ipa_param_body_adjustments::modify_formal_parameters () | |
1226 { | |
1227 tree orig_type = TREE_TYPE (m_fndecl); | |
1228 DECL_ARGUMENTS (m_fndecl) = get_new_param_chain (); | |
1229 | |
1230 /* When signature changes, we need to clear builtin info. */ | |
1231 if (fndecl_built_in_p (m_fndecl)) | |
1232 set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0); | |
1233 | |
1234 /* At this point, removing return value is only implemented when going | |
1235 through tree_function_versioning, not when modifying function body | |
1236 directly. */ | |
1237 gcc_assert (!m_adjustments || !m_adjustments->m_skip_return); | |
1238 tree new_type = build_adjusted_function_type (orig_type, &m_new_types, | |
1239 m_method2func, false); | |
1240 | |
1241 TREE_TYPE (m_fndecl) = new_type; | |
1242 DECL_VIRTUAL_P (m_fndecl) = 0; | |
1243 DECL_LANG_SPECIFIC (m_fndecl) = NULL; | |
1244 if (m_method2func) | |
1245 DECL_VINDEX (m_fndecl) = NULL_TREE; | |
1246 } | |
1247 | |
1248 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement | |
1249 structures. */ | |
1250 | |
1251 ipa_param_body_replacement * | |
1252 ipa_param_body_adjustments::lookup_replacement_1 (tree base, | |
1253 unsigned unit_offset) | |
1254 { | |
1255 unsigned int len = m_replacements.length (); | |
1256 for (unsigned i = 0; i < len; i++) | |
1257 { | |
1258 ipa_param_body_replacement *pbr = &m_replacements[i]; | |
1259 | |
1260 if (pbr->base == base | |
1261 && (pbr->unit_offset == unit_offset)) | |
1262 return pbr; | |
1263 } | |
1264 return NULL; | |
1265 } | |
1266 | |
1267 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression | |
1268 and return it, assuming it is known it does not hold value by reference or | |
1269 in reverse storage order. */ | |
1270 | |
1271 tree | |
1272 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset) | |
1273 { | |
1274 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset); | |
1275 if (!pbr) | |
1276 return NULL; | |
1277 return pbr->repl; | |
585 } | 1278 } |
586 | 1279 |
587 /* If T is an SSA_NAME, return NULL if it is not a default def or | 1280 /* If T is an SSA_NAME, return NULL if it is not a default def or |
588 return its base variable if it is. If IGNORE_DEFAULT_DEF is true, | 1281 return its base variable if it is. If IGNORE_DEFAULT_DEF is true, |
589 the base variable is always returned, regardless if it is a default | 1282 the base variable is always returned, regardless if it is a default |
600 return NULL_TREE; | 1293 return NULL_TREE; |
601 } | 1294 } |
602 return t; | 1295 return t; |
603 } | 1296 } |
604 | 1297 |
605 /* Given an expression, return an adjustment entry specifying the | 1298 /* Given an expression, return the structure describing how it should be |
606 transformation to be done on EXPR. If no suitable adjustment entry | 1299 replaced if it accesses a part of a split parameter or NULL otherwise. |
607 was found, returns NULL. | 1300 |
608 | 1301 Do not free the result, it will be deallocated when the object is destroyed. |
609 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a | 1302 |
610 default def, otherwise bail on them. | 1303 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs |
611 | 1304 which are default definitions, if set, consider all SSA_NAMEs of |
612 If CONVERT is non-NULL, this function will set *CONVERT if the | 1305 PARM_DECLs. */ |
613 expression provided is a component reference. ADJUSTMENTS is the | 1306 |
614 adjustments vector. */ | 1307 ipa_param_body_replacement * |
615 | 1308 ipa_param_body_adjustments::get_expr_replacement (tree expr, |
616 ipa_parm_adjustment * | 1309 bool ignore_default_def) |
617 ipa_get_adjustment_candidate (tree **expr, bool *convert, | 1310 { |
618 ipa_parm_adjustment_vec adjustments, | 1311 tree base; |
619 bool ignore_default_def) | 1312 unsigned unit_offset; |
620 { | 1313 |
621 if (TREE_CODE (**expr) == BIT_FIELD_REF | 1314 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset)) |
622 || TREE_CODE (**expr) == IMAGPART_EXPR | |
623 || TREE_CODE (**expr) == REALPART_EXPR) | |
624 { | |
625 *expr = &TREE_OPERAND (**expr, 0); | |
626 if (convert) | |
627 *convert = true; | |
628 } | |
629 | |
630 poly_int64 offset, size, max_size; | |
631 bool reverse; | |
632 tree base | |
633 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse); | |
634 if (!base || !known_size_p (size) || !known_size_p (max_size)) | |
635 return NULL; | 1315 return NULL; |
636 | |
637 if (TREE_CODE (base) == MEM_REF) | |
638 { | |
639 offset += mem_ref_offset (base).force_shwi () * BITS_PER_UNIT; | |
640 base = TREE_OPERAND (base, 0); | |
641 } | |
642 | 1316 |
643 base = get_ssa_base_param (base, ignore_default_def); | 1317 base = get_ssa_base_param (base, ignore_default_def); |
644 if (!base || TREE_CODE (base) != PARM_DECL) | 1318 if (!base || TREE_CODE (base) != PARM_DECL) |
645 return NULL; | 1319 return NULL; |
646 | 1320 return lookup_replacement_1 (base, unit_offset); |
647 struct ipa_parm_adjustment *cand = NULL; | 1321 } |
648 unsigned int len = adjustments.length (); | 1322 |
649 for (unsigned i = 0; i < len; i++) | 1323 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed |
650 { | 1324 (which includes it being split or replaced), return a new variable that |
651 struct ipa_parm_adjustment *adj = &adjustments[i]; | 1325 should be used for any SSA names that will remain in the function that |
652 | 1326 previously belonged to OLD_DECL. */ |
653 if (adj->base == base | 1327 |
654 && (known_eq (adj->offset, offset) || adj->op == IPA_PARM_OP_REMOVE)) | 1328 tree |
655 { | 1329 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl) |
656 cand = adj; | 1330 { |
657 break; | 1331 unsigned *idx = m_removed_map.get (old_decl); |
658 } | 1332 if (!idx) |
659 } | |
660 | |
661 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE) | |
662 return NULL; | 1333 return NULL; |
663 return cand; | 1334 |
664 } | 1335 tree repl; |
665 | 1336 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL) |
666 /* If the expression *EXPR should be replaced by a reduction of a parameter, do | 1337 { |
667 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT | 1338 gcc_assert (m_removed_decls[*idx] == old_decl); |
668 specifies whether the function should care about type incompatibility the | 1339 repl = copy_var_decl (old_decl, DECL_NAME (old_decl), |
669 current and new expressions. If it is false, the function will leave | 1340 TREE_TYPE (old_decl)); |
670 incompatibility issues to the caller. Return true iff the expression | 1341 m_removed_decls[*idx] = repl; |
671 was modified. */ | 1342 } |
1343 else | |
1344 repl = m_removed_decls[*idx]; | |
1345 return repl; | |
1346 } | |
1347 | |
1348 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a | |
1349 parameter which is to be removed because its value is not used, create a new | |
1350 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the | |
1351 original with it and return it. If there is no need to re-map, return NULL. | |
1352 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */ | |
1353 | |
1354 tree | |
1355 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name, | |
1356 gimple *stmt) | |
1357 { | |
1358 gcc_assert (!m_id); | |
1359 if (TREE_CODE (old_name) != SSA_NAME) | |
1360 return NULL; | |
1361 | |
1362 tree decl = SSA_NAME_VAR (old_name); | |
1363 if (decl == NULL_TREE | |
1364 || TREE_CODE (decl) != PARM_DECL) | |
1365 return NULL; | |
1366 | |
1367 tree repl = get_replacement_ssa_base (decl); | |
1368 if (!repl) | |
1369 return NULL; | |
1370 | |
1371 tree new_name = make_ssa_name (repl, stmt); | |
1372 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name) | |
1373 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name); | |
1374 | |
1375 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1376 { | |
1377 fprintf (dump_file, "replacing an SSA name of a removed param "); | |
1378 print_generic_expr (dump_file, old_name); | |
1379 fprintf (dump_file, " with "); | |
1380 print_generic_expr (dump_file, new_name); | |
1381 fprintf (dump_file, "\n"); | |
1382 } | |
1383 | |
1384 replace_uses_by (old_name, new_name); | |
1385 return new_name; | |
1386 } | |
1387 | |
1388 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies | |
1389 whether the function should care about type incompatibility of the current | |
1390 and new expressions. If it is false, the function will leave | |
1391 incompatibility issues to the caller - note that when the function | |
1392 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify | |
1393 their bases instead of the expressions themselves and then also performs any | |
1394 necessary conversions. */ | |
672 | 1395 |
673 bool | 1396 bool |
674 ipa_modify_expr (tree *expr, bool convert, | 1397 ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert) |
675 ipa_parm_adjustment_vec adjustments) | 1398 { |
676 { | 1399 tree expr = *expr_p; |
677 struct ipa_parm_adjustment *cand | 1400 |
678 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false); | 1401 if (TREE_CODE (expr) == BIT_FIELD_REF |
679 if (!cand) | 1402 || TREE_CODE (expr) == IMAGPART_EXPR |
1403 || TREE_CODE (expr) == REALPART_EXPR) | |
1404 { | |
1405 expr_p = &TREE_OPERAND (expr, 0); | |
1406 expr = *expr_p; | |
1407 convert = true; | |
1408 } | |
1409 | |
1410 ipa_param_body_replacement *pbr = get_expr_replacement (expr, false); | |
1411 if (!pbr) | |
680 return false; | 1412 return false; |
681 | 1413 |
682 tree src; | 1414 tree repl = pbr->repl; |
683 if (cand->by_ref) | 1415 if (dump_file && (dump_flags & TDF_DETAILS)) |
684 { | 1416 { |
685 src = build_simple_mem_ref (cand->new_decl); | 1417 fprintf (dump_file, "About to replace expr "); |
686 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse; | 1418 print_generic_expr (dump_file, expr); |
1419 fprintf (dump_file, " with "); | |
1420 print_generic_expr (dump_file, repl); | |
1421 fprintf (dump_file, "\n"); | |
1422 } | |
1423 | |
1424 if (convert && !useless_type_conversion_p (TREE_TYPE (expr), | |
1425 TREE_TYPE (repl))) | |
1426 { | |
1427 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl); | |
1428 *expr_p = vce; | |
687 } | 1429 } |
688 else | 1430 else |
689 src = cand->new_decl; | 1431 *expr_p = repl; |
690 | 1432 return true; |
691 if (dump_file && (dump_flags & TDF_DETAILS)) | 1433 } |
692 { | 1434 |
693 fprintf (dump_file, "About to replace expr "); | 1435 /* If the assignment statement STMT contains any expressions that need to |
694 print_generic_expr (dump_file, *expr); | 1436 be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any |
695 fprintf (dump_file, " with "); | 1437 potential type incompatibilities. If any conversion statements have to be |
696 print_generic_expr (dump_file, src); | 1438 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the |
697 fprintf (dump_file, "\n"); | 1439 statement was modified. */ |
698 } | 1440 |
699 | 1441 bool |
700 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type)) | 1442 ipa_param_body_adjustments::modify_assignment (gimple *stmt, |
701 { | 1443 gimple_seq *extra_stmts) |
702 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src); | 1444 { |
703 *expr = vce; | 1445 tree *lhs_p, *rhs_p; |
704 } | 1446 bool any; |
1447 | |
1448 if (!gimple_assign_single_p (stmt)) | |
1449 return false; | |
1450 | |
1451 rhs_p = gimple_assign_rhs1_ptr (stmt); | |
1452 lhs_p = gimple_assign_lhs_ptr (stmt); | |
1453 | |
1454 any = modify_expression (lhs_p, false); | |
1455 any |= modify_expression (rhs_p, false); | |
1456 if (any | |
1457 && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p))) | |
1458 { | |
1459 if (TREE_CODE (*rhs_p) == CONSTRUCTOR) | |
1460 { | |
1461 /* V_C_Es of constructors can cause trouble (PR 42714). */ | |
1462 if (is_gimple_reg_type (TREE_TYPE (*lhs_p))) | |
1463 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p)); | |
1464 else | |
1465 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), | |
1466 NULL); | |
1467 } | |
1468 else | |
1469 { | |
1470 tree new_rhs = fold_build1_loc (gimple_location (stmt), | |
1471 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p), | |
1472 *rhs_p); | |
1473 tree tmp = force_gimple_operand (new_rhs, extra_stmts, true, | |
1474 NULL_TREE); | |
1475 gimple_assign_set_rhs1 (stmt, tmp); | |
1476 } | |
1477 return true; | |
1478 } | |
1479 | |
1480 return any; | |
1481 } | |
1482 | |
1483 /* Data passed to remap_split_decl_to_dummy through walk_tree. */ | |
1484 | |
1485 struct simple_tree_swap_info | |
1486 { | |
1487 /* Change FROM to TO. */ | |
1488 tree from, to; | |
1489 /* And set DONE to true when doing so. */ | |
1490 bool done; | |
1491 }; | |
1492 | |
1493 /* Simple remapper to remap a split parameter to the same expression based on a | |
1494 special dummy decl so that edge redirections can detect transitive splitting | |
1495 and finish them. */ | |
1496 | |
1497 static tree | |
1498 remap_split_decl_to_dummy (tree *tp, int *walk_subtrees, void *data) | |
1499 { | |
1500 tree t = *tp; | |
1501 | |
1502 if (DECL_P (t) || TREE_CODE (t) == SSA_NAME) | |
1503 { | |
1504 struct simple_tree_swap_info *swapinfo | |
1505 = (struct simple_tree_swap_info *) data; | |
1506 if (t == swapinfo->from | |
1507 || (TREE_CODE (t) == SSA_NAME | |
1508 && SSA_NAME_VAR (t) == swapinfo->from)) | |
1509 { | |
1510 *tp = swapinfo->to; | |
1511 swapinfo->done = true; | |
1512 } | |
1513 *walk_subtrees = 0; | |
1514 } | |
1515 else if (TYPE_P (t)) | |
1516 *walk_subtrees = 0; | |
705 else | 1517 else |
706 *expr = src; | 1518 *walk_subtrees = 1; |
707 return true; | 1519 return NULL_TREE; |
708 } | 1520 } |
709 | 1521 |
710 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human | 1522 |
711 friendly way, assuming they are meant to be applied to FNDECL. */ | 1523 /* If the call statement pointed at by STMT_P contains any expressions that |
1524 need to be replaced with a different one as noted by ADJUSTMENTS, do so. If the |
1525 statement needs to be rebuilt, do so. Return true if any modifications have | |
1526 been performed. | |
1527 | |
1528 If the method is invoked as a part of IPA clone materialization and if any | |
1529 parameter split is transitive, i.e. it applies to the function that is being |
1530 modified and also to the callee of the statement, replace the parameter | |
1531 passed to old callee with an equivalent expression based on a dummy decl | |
1532 followed by PARM_DECLs representing the actual replacements. The actual | |
1533 replacements will be then converted into SSA_NAMEs and then | |
1534 ipa_param_adjustments::modify_call will find the appropriate ones and leave | |
1535 only those in the call. */ | |
1536 | |
1537 bool | |
1538 ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p) | |
1539 { | |
1540 gcall *stmt = *stmt_p; | |
1541 auto_vec <unsigned, 4> pass_through_args; | |
1542 auto_vec <unsigned, 4> pass_through_pbr_indices; | |
1543 | |
1544 if (m_split_modifications_p && m_id) | |
1545 { | |
1546 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++) | |
1547 { | |
1548 tree t = gimple_call_arg (stmt, i); | |
1549 gcc_assert (TREE_CODE (t) != BIT_FIELD_REF | |
1550 && TREE_CODE (t) != IMAGPART_EXPR | |
1551 && TREE_CODE (t) != REALPART_EXPR); | |
1552 | |
1553 tree base; | |
1554 unsigned unit_offset; | |
1555 if (!isra_get_ref_base_and_offset (t, &base, &unit_offset)) | |
1556 continue; | |
1557 | |
1558 bool by_ref = false; | |
1559 if (TREE_CODE (base) == SSA_NAME) | |
1560 { | |
1561 if (!SSA_NAME_IS_DEFAULT_DEF (base)) | |
1562 continue; | |
1563 base = SSA_NAME_VAR (base); | |
1564 gcc_checking_assert (base); | |
1565 by_ref = true; | |
1566 } | |
1567 if (TREE_CODE (base) != PARM_DECL) | |
1568 continue; | |
1569 | |
1570 bool base_among_replacements = false; | |
1571 unsigned j, repl_list_len = m_replacements.length (); | |
1572 for (j = 0; j < repl_list_len; j++) | |
1573 { | |
1574 ipa_param_body_replacement *pbr = &m_replacements[j]; | |
1575 if (pbr->base == base) | |
1576 { | |
1577 base_among_replacements = true; | |
1578 break; | |
1579 } | |
1580 } | |
1581 if (!base_among_replacements) | |
1582 continue; | |
1583 | |
1584 /* We still have to distinguish between an end-use that we have to | |
1585 transform now and a pass-through, which happens in the following | |
1586 two cases. */ | |
1587 | |
1588 /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider | |
1589 &MEM_REF[ssa_name + offset], we will also have to detect that case | |
1590 here. */ | |
1591 | |
1592 if (TREE_CODE (t) == SSA_NAME | |
1593 && SSA_NAME_IS_DEFAULT_DEF (t) | |
1594 && SSA_NAME_VAR (t) | |
1595 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL) | |
1596 { | |
1597 /* This must be a by_reference pass-through. */ | |
1598 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t))); | |
1599 pass_through_args.safe_push (i); | |
1600 pass_through_pbr_indices.safe_push (j); | |
1601 } | |
1602 else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t))) | |
1603 { | |
1604 /* Currently IPA-SRA guarantees the aggregate access type | |
1605 exactly matches in this case. So if it does not match, it is | |
1606 a pass-through argument that will be sorted out at edge | |
1607 redirection time. */ | |
1608 ipa_param_body_replacement *pbr | |
1609 = lookup_replacement_1 (base, unit_offset); | |
1610 | |
1611 if (!pbr | |
1612 || (TYPE_MAIN_VARIANT (TREE_TYPE (t)) | |
1613 != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl)))) | |
1614 { | |
1615 pass_through_args.safe_push (i); | |
1616 pass_through_pbr_indices.safe_push (j); | |
1617 } | |
1618 } | |
1619 } | |
1620 } | |
1621 | |
1622 unsigned nargs = gimple_call_num_args (stmt); | |
1623 if (!pass_through_args.is_empty ()) | |
1624 { | |
1625 auto_vec<tree, 16> vargs; | |
1626 unsigned pt_idx = 0; | |
1627 for (unsigned i = 0; i < nargs; i++) | |
1628 { | |
1629 if (pt_idx < pass_through_args.length () | |
1630 && i == pass_through_args[pt_idx]) | |
1631 { | |
1632 unsigned j = pass_through_pbr_indices[pt_idx]; | |
1633 pt_idx++; | |
1634 tree base = m_replacements[j].base; | |
1635 | |
1636 /* Map base will get mapped to the special transitive-isra marker | |
1637 dummy decl. */ | |
1638 struct simple_tree_swap_info swapinfo; | |
1639 swapinfo.from = base; | |
1640 swapinfo.to = m_replacements[j].dummy; | |
1641 swapinfo.done = false; | |
1642 tree arg = gimple_call_arg (stmt, i); | |
1643 walk_tree (&arg, remap_split_decl_to_dummy, &swapinfo, NULL); | |
1644 gcc_assert (swapinfo.done); | |
1645 vargs.safe_push (arg); | |
1646 /* Now let's push all replacements pertaining to this parameter | |
1647 so that all gimple register ones get correct SSA_NAMES. Edge | |
1648 redirection will weed out the dummy argument as well as all | |
1649 unused replacements later. */ | |
1650 unsigned int repl_list_len = m_replacements.length (); | |
1651 for (; j < repl_list_len; j++) | |
1652 { | |
1653 if (m_replacements[j].base != base) | |
1654 break; | |
1655 vargs.safe_push (m_replacements[j].repl); | |
1656 } | |
1657 } | |
1658 else | |
1659 { | |
1660 tree t = gimple_call_arg (stmt, i); | |
1661 modify_expression (&t, true); | |
1662 vargs.safe_push (t); | |
1663 } | |
1664 } | |
1665 gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs); | |
1666 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt)); | |
1667 gimple_call_copy_flags (new_stmt, stmt); | |
1668 if (tree lhs = gimple_call_lhs (stmt)) | |
1669 { | |
1670 modify_expression (&lhs, false); | |
1671 gimple_call_set_lhs (new_stmt, lhs); | |
1672 } | |
1673 *stmt_p = new_stmt; | |
1674 return true; | |
1675 } | |
1676 | |
1677 /* Otherwise, no need to rebuild the statement, let's just modify arguments | |
1678 and the LHS if/as appropriate. */ | |
1679 bool modified = false; | |
1680 for (unsigned i = 0; i < nargs; i++) | |
1681 { | |
1682 tree *t = gimple_call_arg_ptr (stmt, i); | |
1683 modified |= modify_expression (t, true); | |
1684 } | |
1685 | |
1686 if (gimple_call_lhs (stmt)) | |
1687 { | |
1688 tree *t = gimple_call_lhs_ptr (stmt); | |
1689 modified |= modify_expression (t, false); | |
1690 } | |
1691 | |
1692 return modified; | |
1693 } | |
1694 | |
1695 /* If the statement STMT contains any expressions that need to be replaced with a |
1696 different one as noted by ADJUSTMENTS, do so. Handle any potential type | |
1697 incompatibilities. If any conversion statements have to be pre-pended to |
1698 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was | |
1699 modified. */ | |
1700 | |
1701 bool | |
1702 ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt, | |
1703 gimple_seq *extra_stmts) | |
1704 { | |
1705 bool modified = false; | |
1706 tree *t; | |
1707 | |
1708 switch (gimple_code (*stmt)) | |
1709 { | |
1710 case GIMPLE_RETURN: | |
1711 t = gimple_return_retval_ptr (as_a <greturn *> (*stmt)); | |
1712 if (m_adjustments && m_adjustments->m_skip_return) | |
1713 *t = NULL_TREE; | |
1714 else if (*t != NULL_TREE) | |
1715 modified |= modify_expression (t, true); | |
1716 break; | |
1717 | |
1718 case GIMPLE_ASSIGN: | |
1719 modified |= modify_assignment (*stmt, extra_stmts); | |
1720 break; | |
1721 | |
1722 case GIMPLE_CALL: | |
1723 modified |= modify_call_stmt ((gcall **) stmt); | |
1724 break; | |
1725 | |
1726 case GIMPLE_ASM: | |
1727 { | |
1728 gasm *asm_stmt = as_a <gasm *> (*stmt); | |
1729 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++) | |
1730 { | |
1731 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i)); | |
1732 modified |= modify_expression (t, true); | |
1733 } | |
1734 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++) | |
1735 { | |
1736 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i)); | |
1737 modified |= modify_expression (t, false); | |
1738 } | |
1739 } | |
1740 break; | |
1741 | |
1742 default: | |
1743 break; | |
1744 } | |
1745 return modified; | |
1746 } | |
1747 | |
1748 | |
1749 /* Traverse body of the current function and perform the requested adjustments | |
1750 on its statements. Return true iff the CFG has been changed. */ | |
1751 | |
1752 bool | |
1753 ipa_param_body_adjustments::modify_cfun_body () | |
1754 { | |
1755 bool cfg_changed = false; | |
1756 basic_block bb; | |
1757 | |
1758 FOR_EACH_BB_FN (bb, cfun) | |
1759 { | |
1760 gimple_stmt_iterator gsi; | |
1761 | |
1762 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1763 { | |
1764 gphi *phi = as_a <gphi *> (gsi_stmt (gsi)); | |
1765 tree new_lhs, old_lhs = gimple_phi_result (phi); | |
1766 new_lhs = replace_removed_params_ssa_names (old_lhs, phi); | |
1767 if (new_lhs) | |
1768 { | |
1769 gimple_phi_set_result (phi, new_lhs); | |
1770 release_ssa_name (old_lhs); | |
1771 } | |
1772 } | |
1773 | |
1774 gsi = gsi_start_bb (bb); | |
1775 while (!gsi_end_p (gsi)) | |
1776 { | |
1777 gimple *stmt = gsi_stmt (gsi); | |
1778 gimple *stmt_copy = stmt; | |
1779 gimple_seq extra_stmts = NULL; | |
1780 bool modified = modify_gimple_stmt (&stmt, &extra_stmts); | |
1781 if (stmt != stmt_copy) | |
1782 { | |
1783 gcc_checking_assert (modified); | |
1784 gsi_replace (&gsi, stmt, false); | |
1785 } | |
1786 if (!gimple_seq_empty_p (extra_stmts)) | |
1787 gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT); | |
1788 | |
1789 def_operand_p defp; | |
1790 ssa_op_iter iter; | |
1791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF) | |
1792 { | |
1793 tree old_def = DEF_FROM_PTR (defp); | |
1794 if (tree new_def = replace_removed_params_ssa_names (old_def, | |
1795 stmt)) | |
1796 { | |
1797 SET_DEF (defp, new_def); | |
1798 release_ssa_name (old_def); | |
1799 modified = true; | |
1800 } | |
1801 } | |
1802 | |
1803 if (modified) | |
1804 { | |
1805 update_stmt (stmt); | |
1806 if (maybe_clean_eh_stmt (stmt) | |
1807 && gimple_purge_dead_eh_edges (gimple_bb (stmt))) | |
1808 cfg_changed = true; | |
1809 } | |
1810 gsi_next (&gsi); | |
1811 } | |
1812 } | |
1813 | |
1814 return cfg_changed; | |
1815 } | |
1816 | |
1817 /* Call gimple_debug_bind_reset_value on all debug statements describing | |
1818 gimple register parameters that are being removed or replaced. */ | |
712 | 1819 |
713 void | 1820 void |
714 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments, | 1821 ipa_param_body_adjustments::reset_debug_stmts () |
715 tree fndecl) | 1822 { |
716 { | 1823 int i, len; |
717 int i, len = adjustments.length (); | 1824 gimple_stmt_iterator *gsip = NULL, gsi; |
718 bool first = true; | 1825 |
719 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl); | 1826 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun))) |
720 | 1827 { |
721 fprintf (file, "IPA param adjustments: "); | 1828 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))); |
1829 gsip = &gsi; | |
1830 } | |
1831 len = m_reset_debug_decls.length (); | |
722 for (i = 0; i < len; i++) | 1832 for (i = 0; i < len; i++) |
723 { | 1833 { |
724 struct ipa_parm_adjustment *adj; | 1834 imm_use_iterator ui; |
725 adj = &adjustments[i]; | 1835 gimple *stmt; |
726 | 1836 gdebug *def_temp; |
727 if (!first) | 1837 tree name, vexpr, copy = NULL_TREE; |
728 fprintf (file, " "); | 1838 use_operand_p use_p; |
729 else | 1839 tree decl = m_reset_debug_decls[i]; |
730 first = false; | 1840 |
731 | 1841 gcc_checking_assert (is_gimple_reg (decl)); |
732 fprintf (file, "%i. base_index: %i - ", i, adj->base_index); | 1842 name = ssa_default_def (cfun, decl); |
733 print_generic_expr (file, parms[adj->base_index]); | 1843 vexpr = NULL; |
734 if (adj->base) | 1844 if (name) |
735 { | 1845 FOR_EACH_IMM_USE_STMT (stmt, ui, name) |
736 fprintf (file, ", base: "); | 1846 { |
737 print_generic_expr (file, adj->base); | 1847 if (gimple_clobber_p (stmt)) |
738 } | 1848 { |
739 if (adj->new_decl) | 1849 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt); |
740 { | 1850 unlink_stmt_vdef (stmt); |
741 fprintf (file, ", new_decl: "); | 1851 gsi_remove (&cgsi, true); |
742 print_generic_expr (file, adj->new_decl); | 1852 release_defs (stmt); |
743 } | 1853 continue; |
744 if (adj->new_ssa_base) | 1854 } |
745 { | 1855 /* All other users must have been removed by function body |
746 fprintf (file, ", new_ssa_base: "); | 1856 modification. */ |
747 print_generic_expr (file, adj->new_ssa_base); | 1857 gcc_assert (is_gimple_debug (stmt)); |
748 } | 1858 if (vexpr == NULL && gsip != NULL) |
749 | 1859 { |
750 if (adj->op == IPA_PARM_OP_COPY) | 1860 vexpr = make_node (DEBUG_EXPR_DECL); |
751 fprintf (file, ", copy_param"); | 1861 def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL); |
752 else if (adj->op == IPA_PARM_OP_REMOVE) | 1862 DECL_ARTIFICIAL (vexpr) = 1; |
753 fprintf (file, ", remove_param"); | 1863 TREE_TYPE (vexpr) = TREE_TYPE (name); |
754 else | 1864 SET_DECL_MODE (vexpr, DECL_MODE (decl)); |
755 { | 1865 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT); |
756 fprintf (file, ", offset "); | 1866 } |
757 print_dec (adj->offset, file); | 1867 if (vexpr) |
758 } | 1868 { |
759 if (adj->by_ref) | 1869 FOR_EACH_IMM_USE_ON_STMT (use_p, ui) |
760 fprintf (file, ", by_ref"); | 1870 SET_USE (use_p, vexpr); |
761 print_node_brief (file, ", type: ", adj->type, 0); | 1871 } |
762 fprintf (file, "\n"); | 1872 else |
763 } | 1873 gimple_debug_bind_reset_value (stmt); |
764 parms.release (); | 1874 update_stmt (stmt); |
765 } | 1875 } |
766 | 1876 /* Create a VAR_DECL for debug info purposes. */ |
1877 if (!DECL_IGNORED_P (decl)) | |
1878 { | |
1879 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl), | |
1880 VAR_DECL, DECL_NAME (decl), | |
1881 TREE_TYPE (decl)); | |
1882 if (DECL_PT_UID_SET_P (decl)) | |
1883 SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
1884 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
1885 TREE_READONLY (copy) = TREE_READONLY (decl); | |
1886 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
1887 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); | |
1888 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
1889 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
1890 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); | |
1891 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1; | |
1892 SET_DECL_RTL (copy, 0); | |
1893 TREE_USED (copy) = 1; | |
1894 DECL_CONTEXT (copy) = current_function_decl; | |
1895 add_local_decl (cfun, copy); | |
1896 DECL_CHAIN (copy) | |
1897 = BLOCK_VARS (DECL_INITIAL (current_function_decl)); | |
1898 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy; | |
1899 } | |
1900 if (gsip != NULL && copy && target_for_debug_bind (decl)) | |
1901 { | |
1902 gcc_assert (TREE_CODE (decl) == PARM_DECL); | |
1903 if (vexpr) | |
1904 def_temp = gimple_build_debug_bind (copy, vexpr, NULL); | |
1905 else | |
1906 def_temp = gimple_build_debug_source_bind (copy, decl, | |
1907 NULL); | |
1908 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT); | |
1909 } | |
1910 } | |
1911 } | |
1912 | |
1913 /* Perform all necessary body changes to change signature, body and debug info | |
1914 of fun according to adjustments passed at construction. Return true if CFG | |
1915 was changed in any way. The main entry point for modification of standalone | |
1916 functions that is not part of IPA clone materialization. */ | |
1917 | |
1918 bool | |
1919 ipa_param_body_adjustments::perform_cfun_body_modifications () | |
1920 { | |
1921 bool cfg_changed; | |
1922 modify_formal_parameters (); | |
1923 cfg_changed = modify_cfun_body (); | |
1924 reset_debug_stmts (); | |
1925 | |
1926 return cfg_changed; | |
1927 } | |
1928 |