CbC_gcc: annotate gcc/tree-stdarg.c @ 158:494b0b89df80 (default tip)
author:  Shinji KONO <kono@ie.u-ryukyu.ac.jp>
date:    Mon, 25 May 2020 18:13:55 +0900
parents: 1830386684a0
/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save the registers that
   will be used in va_arg macros.  E.g. if va_arg is only used with
   integral types in the function, floating point registers don't need
   to be saved, etc.  */
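
/* A minimal sketch of the kind of function this pass helps; the names
   are illustrative, not from GCC:

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
	 total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   Every va_arg here uses an integral type, so on targets that spill
   argument registers at function entry only the general purpose
   registers need to be saved; the floating point save area can be
   skipped entirely.  */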

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
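
/* For instance (an illustrative sketch): if va_start is in bb2 and the
   counter update is in a loop body bb3, the reverse walk below starting
   from bb3's predecessors reaches bb3 itself over the back edge, so the
   function returns false -- bb3 may run more than once per va_start.  */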

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
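
/* A minimal sketch of the SSA chain this walks, assuming a struct
   va_list (e.g. x86_64) whose GPR counter is bumped by 8 for one
   va_arg; the SSA names are illustrative:

     gpr.0_1 = ap.gp_offset;
     _2 = gpr.0_1 + 8;
     ap.gp_offset = _2;

   Called with COUNTER = ap.gp_offset and RHS = _2, the first loop
   follows _2 back through the addition to the load of ap.gp_offset
   and returns 8.  */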

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (VAR_P (var))
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement,
   otherwise in a VAR = AP statement.  GPR_P is true if AP is a GPR
   counter, false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment;
   if WRITE_P is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function
   local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
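
/* E.g. (an illustrative sketch) a temporary holding the va_list
   pointer that is then passed to another function:

     tem_1 = ap;
     foo (tem_1);

   is not one of the recognized va_arg patterns, so the walk below
   reports it as a possible escape and the caller falls back to
   saving everything.  */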

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		       other_ap_temp = ap_temp;
		       other_ap_temp = ap_temp + constant;
		       other_ap_temp = (some_type *) ap_temp;
		       ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
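
/* With -fdump-tree-stdarg the result is reported per function; a
   sketch of the dump this function emits at the end (the function
   name and counts are illustrative, e.g. two int va_args on x86_64):

     sum_ints: va_list escapes 0, needs to save 16 GPR units
     and 0 FPR units.  */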

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || !VAR_P (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && (fndecl_built_in_p (callee, BUILT_IN_VA_START)
		      || fndecl_built_in_p (callee, BUILT_IN_VA_END)))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			   tem1 = ap;
			   tem2 = tem1 + CST;
			   ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Expand IFN_VA_ARGs in FUN.  */
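
/* A rough sketch of the transform (the GIMPLE spelling is
   illustrative): a gimplified va_arg such as

     _1 = VA_ARG (&ap, 0B, 0B);

   is replaced by whatever sequence the target's gimplify_va_arg_expr
   hook produces for reading the next argument from AP, split into one
   or more new basic blocks right after the call.  */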

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 4)
	      {
		/* We've transported the size from the WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
	for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	  gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, e.g. when passing a va_list to
	 another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}