comparison gcc/tree-stdarg.c @ 111:04ced10e8804

gcc 7

| author | kono |
|---|---|
| date | Fri, 27 Oct 2017 22:46:09 +0900 |
| parents | f6334be47118 |
| children | 84e7813d76e9 |
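For orientation before the side-by-side listing: this file implements the pass that bounds how many general-purpose and floating-point register save slots a stdarg function can actually need (see the file comment near the top of the diff). A minimal, hypothetical C sketch of the kind of function the pass analyzes follows; the function name `first_vararg` and the example itself are illustrative assumptions, not part of this changeset:

```c
#include <stdarg.h>

/* Only one int is ever read through the va_list, so on targets that
   spill argument registers on entry to stdarg functions, the pass can
   shrink cfun->va_list_gpr_size to a single GPR slot and drop the FPR
   saves entirely.  */
int
first_vararg (int count, ...)
{
  va_list ap;
  int v;

  va_start (ap, count);
  v = va_arg (ap, int);   /* a single GPR-sized read */
  va_end (ap);
  return v;
}
```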
| 68:561a7518be6b | 111:04ced10e8804 |
|---|---|
1 /* Pass computing data for optimizing stdarg functions. | 1 /* Pass computing data for optimizing stdarg functions. |
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010 | 2 Copyright (C) 2004-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 Contributed by Jakub Jelinek <jakub@redhat.com> | 3 Contributed by Jakub Jelinek <jakub@redhat.com> |
5 | 4 |
6 This file is part of GCC. | 5 This file is part of GCC. |
7 | 6 |
8 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
20 <http://www.gnu.org/licenses/>. */ | 19 <http://www.gnu.org/licenses/>. */ |
21 | 20 |
22 #include "config.h" | 21 #include "config.h" |
23 #include "system.h" | 22 #include "system.h" |
24 #include "coretypes.h" | 23 #include "coretypes.h" |
25 #include "tm.h" | 24 #include "backend.h" |
25 #include "target.h" | |
26 #include "tree.h" | 26 #include "tree.h" |
27 #include "function.h" | 27 #include "gimple.h" |
28 #include "tree-pass.h" | |
29 #include "ssa.h" | |
30 #include "gimple-pretty-print.h" | |
31 #include "fold-const.h" | |
28 #include "langhooks.h" | 32 #include "langhooks.h" |
29 #include "gimple-pretty-print.h" | 33 #include "gimple-iterator.h" |
30 #include "target.h" | 34 #include "gimple-walk.h" |
31 #include "tree-flow.h" | 35 #include "gimplify.h" |
32 #include "tree-pass.h" | 36 #include "tree-into-ssa.h" |
37 #include "tree-cfg.h" | |
33 #include "tree-stdarg.h" | 38 #include "tree-stdarg.h" |
39 #include "tree-chkp.h" | |
34 | 40 |
35 /* A simple pass that attempts to optimize stdarg functions on architectures | 41 /* A simple pass that attempts to optimize stdarg functions on architectures |
36 that need to save register arguments to stack on entry to stdarg functions. | 42 that need to save register arguments to stack on entry to stdarg functions. |
37 If the function doesn't use any va_start macros, no registers need to | 43 If the function doesn't use any va_start macros, no registers need to |
38 be saved. If va_start macros are used, the va_list variables don't escape | 44 be saved. If va_start macros are used, the va_list variables don't escape |
45 is executed at most as many times as VA_START_BB. */ | 51 is executed at most as many times as VA_START_BB. */ |
46 | 52 |
47 static bool | 53 static bool |
48 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb) | 54 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb) |
49 { | 55 { |
50 VEC (edge, heap) *stack = NULL; | 56 auto_vec<edge, 10> stack; |
51 edge e; | 57 edge e; |
52 edge_iterator ei; | 58 edge_iterator ei; |
53 sbitmap visited; | |
54 bool ret; | 59 bool ret; |
55 | 60 |
56 if (va_arg_bb == va_start_bb) | 61 if (va_arg_bb == va_start_bb) |
57 return true; | 62 return true; |
58 | 63 |
59 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb)) | 64 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb)) |
60 return false; | 65 return false; |
61 | 66 |
62 visited = sbitmap_alloc (last_basic_block); | 67 auto_sbitmap visited (last_basic_block_for_fn (cfun)); |
63 sbitmap_zero (visited); | 68 bitmap_clear (visited); |
64 ret = true; | 69 ret = true; |
65 | 70 |
66 FOR_EACH_EDGE (e, ei, va_arg_bb->preds) | 71 FOR_EACH_EDGE (e, ei, va_arg_bb->preds) |
67 VEC_safe_push (edge, heap, stack, e); | 72 stack.safe_push (e); |
68 | 73 |
69 while (! VEC_empty (edge, stack)) | 74 while (! stack.is_empty ()) |
70 { | 75 { |
71 basic_block src; | 76 basic_block src; |
72 | 77 |
73 e = VEC_pop (edge, stack); | 78 e = stack.pop (); |
74 src = e->src; | 79 src = e->src; |
75 | 80 |
76 if (e->flags & EDGE_COMPLEX) | 81 if (e->flags & EDGE_COMPLEX) |
77 { | 82 { |
78 ret = false; | 83 ret = false; |
87 { | 92 { |
88 ret = false; | 93 ret = false; |
89 break; | 94 break; |
90 } | 95 } |
91 | 96 |
92 gcc_assert (src != ENTRY_BLOCK_PTR); | 97 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
93 | 98 |
94 if (! TEST_BIT (visited, src->index)) | 99 if (! bitmap_bit_p (visited, src->index)) |
95 { | 100 { |
96 SET_BIT (visited, src->index); | 101 bitmap_set_bit (visited, src->index); |
97 FOR_EACH_EDGE (e, ei, src->preds) | 102 FOR_EACH_EDGE (e, ei, src->preds) |
98 VEC_safe_push (edge, heap, stack, e); | 103 stack.safe_push (e); |
99 } | 104 } |
100 } | 105 } |
101 | 106 |
102 VEC_free (edge, heap, stack); | |
103 sbitmap_free (visited); | |
104 return ret; | 107 return ret; |
105 } | 108 } |
106 | 109 |
107 | 110 |
108 /* For statement COUNTER = RHS, if RHS is COUNTER + constant, | 111 /* For statement COUNTER = RHS, if RHS is COUNTER + constant, |
109 return constant, otherwise return (unsigned HOST_WIDE_INT) -1. | 112 return constant, otherwise return HOST_WIDE_INT_M1U. |
110 GPR_P is true if this is GPR counter. */ | 113 GPR_P is true if this is GPR counter. */ |
111 | 114 |
112 static unsigned HOST_WIDE_INT | 115 static unsigned HOST_WIDE_INT |
113 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs, | 116 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs, |
114 bool gpr_p) | 117 bool gpr_p) |
115 { | 118 { |
116 tree lhs, orig_lhs; | 119 tree lhs, orig_lhs; |
117 gimple stmt; | 120 gimple *stmt; |
118 unsigned HOST_WIDE_INT ret = 0, val, counter_val; | 121 unsigned HOST_WIDE_INT ret = 0, val, counter_val; |
119 unsigned int max_size; | 122 unsigned int max_size; |
120 | 123 |
121 if (si->offsets == NULL) | 124 if (si->offsets == NULL) |
122 { | 125 { |
131 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE; | 134 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE; |
132 orig_lhs = lhs = rhs; | 135 orig_lhs = lhs = rhs; |
133 while (lhs) | 136 while (lhs) |
134 { | 137 { |
135 enum tree_code rhs_code; | 138 enum tree_code rhs_code; |
139 tree rhs1; | |
136 | 140 |
137 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1) | 141 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1) |
138 { | 142 { |
139 if (counter_val >= max_size) | 143 if (counter_val >= max_size) |
140 { | 144 { |
147 } | 151 } |
148 | 152 |
149 stmt = SSA_NAME_DEF_STMT (lhs); | 153 stmt = SSA_NAME_DEF_STMT (lhs); |
150 | 154 |
151 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs) | 155 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs) |
152 return (unsigned HOST_WIDE_INT) -1; | 156 return HOST_WIDE_INT_M1U; |
153 | 157 |
154 rhs_code = gimple_assign_rhs_code (stmt); | 158 rhs_code = gimple_assign_rhs_code (stmt); |
159 rhs1 = gimple_assign_rhs1 (stmt); | |
155 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS | 160 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS |
156 || gimple_assign_cast_p (stmt)) | 161 || gimple_assign_cast_p (stmt)) |
157 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME) | 162 && TREE_CODE (rhs1) == SSA_NAME) |
158 { | 163 { |
159 lhs = gimple_assign_rhs1 (stmt); | 164 lhs = rhs1; |
160 continue; | 165 continue; |
161 } | 166 } |
162 | 167 |
163 if ((rhs_code == POINTER_PLUS_EXPR | 168 if ((rhs_code == POINTER_PLUS_EXPR |
164 || rhs_code == PLUS_EXPR) | 169 || rhs_code == PLUS_EXPR) |
165 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | 170 && TREE_CODE (rhs1) == SSA_NAME |
166 && host_integerp (gimple_assign_rhs2 (stmt), 1)) | 171 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt))) |
167 { | 172 { |
168 ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1); | 173 ret += tree_to_uhwi (gimple_assign_rhs2 (stmt)); |
169 lhs = gimple_assign_rhs1 (stmt); | 174 lhs = rhs1; |
170 continue; | 175 continue; |
171 } | 176 } |
172 | 177 |
178 if (rhs_code == ADDR_EXPR | |
179 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF | |
180 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME | |
181 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1))) | |
182 { | |
183 ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)); | |
184 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0); | |
185 continue; | |
186 } | |
187 | |
173 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS) | 188 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS) |
174 return (unsigned HOST_WIDE_INT) -1; | 189 return HOST_WIDE_INT_M1U; |
175 | 190 |
176 rhs = gimple_assign_rhs1 (stmt); | 191 rhs = gimple_assign_rhs1 (stmt); |
177 if (TREE_CODE (counter) != TREE_CODE (rhs)) | 192 if (TREE_CODE (counter) != TREE_CODE (rhs)) |
178 return (unsigned HOST_WIDE_INT) -1; | 193 return HOST_WIDE_INT_M1U; |
179 | 194 |
180 if (TREE_CODE (counter) == COMPONENT_REF) | 195 if (TREE_CODE (counter) == COMPONENT_REF) |
181 { | 196 { |
182 if (get_base_address (counter) != get_base_address (rhs) | 197 if (get_base_address (counter) != get_base_address (rhs) |
183 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL | 198 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL |
184 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1)) | 199 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1)) |
185 return (unsigned HOST_WIDE_INT) -1; | 200 return HOST_WIDE_INT_M1U; |
186 } | 201 } |
187 else if (counter != rhs) | 202 else if (counter != rhs) |
188 return (unsigned HOST_WIDE_INT) -1; | 203 return HOST_WIDE_INT_M1U; |
189 | 204 |
190 lhs = NULL; | 205 lhs = NULL; |
191 } | 206 } |
192 | 207 |
193 lhs = orig_lhs; | 208 lhs = orig_lhs; |
194 val = ret + counter_val; | 209 val = ret + counter_val; |
195 while (lhs) | 210 while (lhs) |
196 { | 211 { |
197 enum tree_code rhs_code; | 212 enum tree_code rhs_code; |
213 tree rhs1; | |
198 | 214 |
199 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1) | 215 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1) |
200 break; | 216 break; |
201 | 217 |
202 if (val >= max_size) | 218 if (val >= max_size) |
205 si->offsets[SSA_NAME_VERSION (lhs)] = val; | 221 si->offsets[SSA_NAME_VERSION (lhs)] = val; |
206 | 222 |
207 stmt = SSA_NAME_DEF_STMT (lhs); | 223 stmt = SSA_NAME_DEF_STMT (lhs); |
208 | 224 |
209 rhs_code = gimple_assign_rhs_code (stmt); | 225 rhs_code = gimple_assign_rhs_code (stmt); |
226 rhs1 = gimple_assign_rhs1 (stmt); | |
210 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS | 227 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS |
211 || gimple_assign_cast_p (stmt)) | 228 || gimple_assign_cast_p (stmt)) |
212 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME) | 229 && TREE_CODE (rhs1) == SSA_NAME) |
213 { | 230 { |
214 lhs = gimple_assign_rhs1 (stmt); | 231 lhs = rhs1; |
215 continue; | 232 continue; |
216 } | 233 } |
217 | 234 |
218 if ((rhs_code == POINTER_PLUS_EXPR | 235 if ((rhs_code == POINTER_PLUS_EXPR |
219 || rhs_code == PLUS_EXPR) | 236 || rhs_code == PLUS_EXPR) |
220 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | 237 && TREE_CODE (rhs1) == SSA_NAME |
221 && host_integerp (gimple_assign_rhs2 (stmt), 1)) | 238 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt))) |
222 { | 239 { |
223 val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1); | 240 val -= tree_to_uhwi (gimple_assign_rhs2 (stmt)); |
224 lhs = gimple_assign_rhs1 (stmt); | 241 lhs = rhs1; |
242 continue; | |
243 } | |
244 | |
245 if (rhs_code == ADDR_EXPR | |
246 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF | |
247 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME | |
248 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1))) | |
249 { | |
250 val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)); | |
251 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0); | |
225 continue; | 252 continue; |
226 } | 253 } |
227 | 254 |
228 lhs = NULL; | 255 lhs = NULL; |
229 } | 256 } |
240 { | 267 { |
241 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info; | 268 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info; |
242 tree var = *tp; | 269 tree var = *tp; |
243 | 270 |
244 if (TREE_CODE (var) == SSA_NAME) | 271 if (TREE_CODE (var) == SSA_NAME) |
245 var = SSA_NAME_VAR (var); | 272 { |
246 | 273 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var))) |
247 if (TREE_CODE (var) == VAR_DECL | 274 return var; |
248 && bitmap_bit_p (va_list_vars, DECL_UID (var))) | 275 } |
249 return var; | 276 else if (VAR_P (var)) |
277 { | |
278 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names)) | |
279 return var; | |
280 } | |
250 | 281 |
251 return NULL_TREE; | 282 return NULL_TREE; |
252 } | 283 } |
253 | 284 |
254 | 285 |
321 if (TREE_CODE (ap) != COMPONENT_REF | 352 if (TREE_CODE (ap) != COMPONENT_REF |
322 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL) | 353 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL) |
323 return false; | 354 return false; |
324 | 355 |
325 if (TREE_CODE (var) != SSA_NAME | 356 if (TREE_CODE (var) != SSA_NAME |
326 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var)))) | 357 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var))) |
327 return false; | 358 return false; |
328 | 359 |
329 base = get_base_address (ap); | 360 base = get_base_address (ap); |
330 if (TREE_CODE (base) != VAR_DECL | 361 if (!VAR_P (base) |
331 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base))) | 362 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names)) |
332 return false; | 363 return false; |
333 | 364 |
334 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field) | 365 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field) |
335 va_list_counter_op (si, ap, var, true, write_p); | 366 va_list_counter_op (si, ap, var, true, write_p); |
336 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field) | 367 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field) |
344 search for va_list references in the statement. */ | 375 search for va_list references in the statement. */ |
345 | 376 |
346 static bool | 377 static bool |
347 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem) | 378 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem) |
348 { | 379 { |
349 if (TREE_CODE (ap) != VAR_DECL | 380 if (!VAR_P (ap) |
350 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap))) | 381 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names)) |
351 return false; | 382 return false; |
352 | 383 |
353 if (TREE_CODE (tem) != SSA_NAME | 384 if (TREE_CODE (tem) != SSA_NAME |
354 || bitmap_bit_p (si->va_list_vars, | 385 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem))) |
355 DECL_UID (SSA_NAME_VAR (tem))) | |
356 || is_global_var (SSA_NAME_VAR (tem))) | |
357 return false; | 386 return false; |
358 | 387 |
359 if (si->compute_sizes < 0) | 388 if (si->compute_sizes < 0) |
360 { | 389 { |
361 si->compute_sizes = 0; | 390 si->compute_sizes = 0; |
374 If va_arg is used in a loop, we don't know how many registers need | 403 If va_arg is used in a loop, we don't know how many registers need |
375 saving. */ | 404 saving. */ |
376 if (! si->compute_sizes) | 405 if (! si->compute_sizes) |
377 return false; | 406 return false; |
378 | 407 |
379 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1) | 408 if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U) |
380 return false; | 409 return false; |
381 | 410 |
382 /* Note the temporary, as we need to track whether it doesn't escape | 411 /* Note the temporary, as we need to track whether it doesn't escape |
383 the current function. */ | 412 the current function. */ |
384 bitmap_set_bit (si->va_list_escape_vars, | 413 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem)); |
385 DECL_UID (SSA_NAME_VAR (tem))); | 414 |
386 return true; | 415 return true; |
387 } | 416 } |
388 | 417 |
389 | 418 |
390 /* Check for: | 419 /* Check for: |
396 static bool | 425 static bool |
397 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2) | 426 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2) |
398 { | 427 { |
399 unsigned HOST_WIDE_INT increment; | 428 unsigned HOST_WIDE_INT increment; |
400 | 429 |
401 if (TREE_CODE (ap) != VAR_DECL | 430 if (!VAR_P (ap) |
402 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap))) | 431 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names)) |
403 return false; | 432 return false; |
404 | 433 |
405 if (TREE_CODE (tem2) != SSA_NAME | 434 if (TREE_CODE (tem2) != SSA_NAME |
406 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2)))) | 435 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2))) |
407 return false; | 436 return false; |
408 | 437 |
409 if (si->compute_sizes <= 0) | 438 if (si->compute_sizes <= 0) |
410 return false; | 439 return false; |
411 | 440 |
431 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs) | 460 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs) |
432 { | 461 { |
433 if (! POINTER_TYPE_P (TREE_TYPE (rhs))) | 462 if (! POINTER_TYPE_P (TREE_TYPE (rhs))) |
434 return; | 463 return; |
435 | 464 |
436 if (TREE_CODE (rhs) != SSA_NAME | 465 if (TREE_CODE (rhs) == SSA_NAME) |
437 || ! bitmap_bit_p (si->va_list_escape_vars, | 466 { |
438 DECL_UID (SSA_NAME_VAR (rhs)))) | 467 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs))) |
468 return; | |
469 } | |
470 else if (TREE_CODE (rhs) == ADDR_EXPR | |
471 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF | |
472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME) | |
473 { | |
474 tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0); | |
475 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr))) | |
476 return; | |
477 } | |
478 else | |
439 return; | 479 return; |
440 | 480 |
441 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs))) | 481 if (TREE_CODE (lhs) != SSA_NAME) |
442 { | 482 { |
443 si->va_list_escapes = true; | 483 si->va_list_escapes = true; |
444 return; | 484 return; |
445 } | 485 } |
446 | 486 |
466 si->va_list_escapes = true; | 506 si->va_list_escapes = true; |
467 return; | 507 return; |
468 } | 508 } |
469 | 509 |
470 if (va_list_counter_bump (si, si->va_start_ap, lhs, true) | 510 if (va_list_counter_bump (si, si->va_start_ap, lhs, true) |
471 == (unsigned HOST_WIDE_INT) -1) | 511 == HOST_WIDE_INT_M1U) |
472 { | 512 { |
473 si->va_list_escapes = true; | 513 si->va_list_escapes = true; |
474 return; | 514 return; |
475 } | 515 } |
476 | 516 |
477 bitmap_set_bit (si->va_list_escape_vars, | 517 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs)); |
478 DECL_UID (SSA_NAME_VAR (lhs))); | |
479 } | 518 } |
480 | 519 |
481 | 520 |
482 /* Check all uses of temporaries from si->va_list_escape_vars bitmap. | 521 /* Check all uses of temporaries from si->va_list_escape_vars bitmap. |
483 Return true if va_list might be escaping. */ | 522 Return true if va_list might be escaping. */ |
485 static bool | 524 static bool |
486 check_all_va_list_escapes (struct stdarg_info *si) | 525 check_all_va_list_escapes (struct stdarg_info *si) |
487 { | 526 { |
488 basic_block bb; | 527 basic_block bb; |
489 | 528 |
490 FOR_EACH_BB (bb) | 529 FOR_EACH_BB_FN (bb, cfun) |
491 { | 530 { |
492 gimple_stmt_iterator i; | 531 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i); |
493 | 532 gsi_next (&i)) |
494 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) | 533 { |
495 { | 534 tree lhs; |
496 gimple stmt = gsi_stmt (i); | 535 use_operand_p uop; |
536 ssa_op_iter soi; | |
537 gphi *phi = i.phi (); | |
538 | |
539 lhs = PHI_RESULT (phi); | |
540 if (virtual_operand_p (lhs) | |
541 || bitmap_bit_p (si->va_list_escape_vars, | |
542 SSA_NAME_VERSION (lhs))) | |
543 continue; | |
544 | |
545 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE) | |
546 { | |
547 tree rhs = USE_FROM_PTR (uop); | |
548 if (TREE_CODE (rhs) == SSA_NAME | |
549 && bitmap_bit_p (si->va_list_escape_vars, | |
550 SSA_NAME_VERSION (rhs))) | |
551 { | |
552 if (dump_file && (dump_flags & TDF_DETAILS)) | |
553 { | |
554 fputs ("va_list escapes in ", dump_file); | |
555 print_gimple_stmt (dump_file, phi, 0, dump_flags); | |
556 fputc ('\n', dump_file); | |
557 } | |
558 return true; | |
559 } | |
560 } | |
561 } | |
562 | |
563 for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i); | |
564 gsi_next (&i)) | |
565 { | |
566 gimple *stmt = gsi_stmt (i); | |
497 tree use; | 567 tree use; |
498 ssa_op_iter iter; | 568 ssa_op_iter iter; |
499 | 569 |
500 if (is_gimple_debug (stmt)) | 570 if (is_gimple_debug (stmt)) |
501 continue; | 571 continue; |
502 | 572 |
503 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES) | 573 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES) |
504 { | 574 { |
505 if (! bitmap_bit_p (si->va_list_escape_vars, | 575 if (! bitmap_bit_p (si->va_list_escape_vars, |
506 DECL_UID (SSA_NAME_VAR (use)))) | 576 SSA_NAME_VERSION (use))) |
507 continue; | 577 continue; |
508 | 578 |
509 if (is_gimple_assign (stmt)) | 579 if (is_gimple_assign (stmt)) |
510 { | 580 { |
511 tree rhs = gimple_assign_rhs1 (stmt); | 581 tree rhs = gimple_assign_rhs1 (stmt); |
512 enum tree_code rhs_code = gimple_assign_rhs_code (stmt); | 582 enum tree_code rhs_code = gimple_assign_rhs_code (stmt); |
513 | 583 |
514 /* x = *ap_temp; */ | 584 /* x = *ap_temp; */ |
515 if (gimple_assign_rhs_code (stmt) == MEM_REF | 585 if (rhs_code == MEM_REF |
516 && TREE_OPERAND (rhs, 0) == use | 586 && TREE_OPERAND (rhs, 0) == use |
517 && TYPE_SIZE_UNIT (TREE_TYPE (rhs)) | 587 && TYPE_SIZE_UNIT (TREE_TYPE (rhs)) |
518 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1) | 588 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs))) |
519 && si->offsets[SSA_NAME_VERSION (use)] != -1) | 589 && si->offsets[SSA_NAME_VERSION (use)] != -1) |
520 { | 590 { |
521 unsigned HOST_WIDE_INT gpr_size; | 591 unsigned HOST_WIDE_INT gpr_size; |
522 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs)); | 592 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs)); |
523 | 593 |
524 gpr_size = si->offsets[SSA_NAME_VERSION (use)] | 594 gpr_size = si->offsets[SSA_NAME_VERSION (use)] |
525 + tree_low_cst (TREE_OPERAND (rhs, 1), 0) | 595 + tree_to_shwi (TREE_OPERAND (rhs, 1)) |
526 + tree_low_cst (access_size, 1); | 596 + tree_to_uhwi (access_size); |
527 if (gpr_size >= VA_LIST_MAX_GPR_SIZE) | 597 if (gpr_size >= VA_LIST_MAX_GPR_SIZE) |
528 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; | 598 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; |
529 else if (gpr_size > cfun->va_list_gpr_size) | 599 else if (gpr_size > cfun->va_list_gpr_size) |
530 cfun->va_list_gpr_size = gpr_size; | 600 cfun->va_list_gpr_size = gpr_size; |
531 continue; | 601 continue; |
547 { | 617 { |
548 tree lhs = gimple_assign_lhs (stmt); | 618 tree lhs = gimple_assign_lhs (stmt); |
549 | 619 |
550 if (TREE_CODE (lhs) == SSA_NAME | 620 if (TREE_CODE (lhs) == SSA_NAME |
551 && bitmap_bit_p (si->va_list_escape_vars, | 621 && bitmap_bit_p (si->va_list_escape_vars, |
552 DECL_UID (SSA_NAME_VAR (lhs)))) | 622 SSA_NAME_VERSION (lhs))) |
553 continue; | 623 continue; |
554 | 624 |
555 if (TREE_CODE (lhs) == VAR_DECL | 625 if (VAR_P (lhs) |
556 && bitmap_bit_p (si->va_list_vars, | 626 && bitmap_bit_p (si->va_list_vars, |
557 DECL_UID (lhs))) | 627 DECL_UID (lhs) + num_ssa_names)) |
628 continue; | |
629 } | |
630 else if (rhs_code == ADDR_EXPR | |
631 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF | |
632 && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use) | |
633 { | |
634 tree lhs = gimple_assign_lhs (stmt); | |
635 | |
636 if (bitmap_bit_p (si->va_list_escape_vars, | |
637 SSA_NAME_VERSION (lhs))) | |
558 continue; | 638 continue; |
559 } | 639 } |
560 } | 640 } |
561 | 641 |
562 if (dump_file && (dump_flags & TDF_DETAILS)) | 642 if (dump_file && (dump_flags & TDF_DETAILS)) |
571 } | 651 } |
572 | 652 |
573 return false; | 653 return false; |
574 } | 654 } |
575 | 655 |
576 | 656 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */ |
577 /* Return true if this optimization pass should be done. | 657 |
578 It makes only sense for stdarg functions. */ | 658 static void |
579 | 659 optimize_va_list_gpr_fpr_size (function *fun) |
580 static bool | |
581 gate_optimize_stdarg (void) | |
582 { | |
583 /* This optimization is only for stdarg functions. */ | |
584 return cfun->stdarg != 0; | |
585 } | |
586 | |
587 | |
588 /* Entry point to the stdarg optimization pass. */ | |
589 | |
590 static unsigned int | |
591 execute_optimize_stdarg (void) | |
592 { | 660 { |
593 basic_block bb; | 661 basic_block bb; |
594 bool va_list_escapes = false; | 662 bool va_list_escapes = false; |
595 bool va_list_simple_ptr; | 663 bool va_list_simple_ptr; |
596 struct stdarg_info si; | 664 struct stdarg_info si; |
597 struct walk_stmt_info wi; | 665 struct walk_stmt_info wi; |
598 const char *funcname = NULL; | 666 const char *funcname = NULL; |
599 tree cfun_va_list; | 667 tree cfun_va_list; |
600 | 668 |
601 cfun->va_list_gpr_size = 0; | 669 fun->va_list_gpr_size = 0; |
602 cfun->va_list_fpr_size = 0; | 670 fun->va_list_fpr_size = 0; |
603 memset (&si, 0, sizeof (si)); | 671 memset (&si, 0, sizeof (si)); |
604 si.va_list_vars = BITMAP_ALLOC (NULL); | 672 si.va_list_vars = BITMAP_ALLOC (NULL); |
605 si.va_list_escape_vars = BITMAP_ALLOC (NULL); | 673 si.va_list_escape_vars = BITMAP_ALLOC (NULL); |
606 | 674 |
607 if (dump_file) | 675 if (dump_file) |
608 funcname = lang_hooks.decl_printable_name (current_function_decl, 2); | 676 funcname = lang_hooks.decl_printable_name (current_function_decl, 2); |
609 | 677 |
610 cfun_va_list = targetm.fn_abi_va_list (cfun->decl); | 678 cfun_va_list = targetm.fn_abi_va_list (fun->decl); |
611 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | 679 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) |
612 && (TREE_TYPE (cfun_va_list) == void_type_node | 680 && (TREE_TYPE (cfun_va_list) == void_type_node |
613 || TREE_TYPE (cfun_va_list) == char_type_node); | 681 || TREE_TYPE (cfun_va_list) == char_type_node); |
614 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr); | 682 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr); |
615 | 683 |
616 FOR_EACH_BB (bb) | 684 FOR_EACH_BB_FN (bb, fun) |
617 { | 685 { |
618 gimple_stmt_iterator i; | 686 gimple_stmt_iterator i; |
619 | 687 |
620 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) | 688 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) |
621 { | 689 { |
622 gimple stmt = gsi_stmt (i); | 690 gimple *stmt = gsi_stmt (i); |
623 tree callee, ap; | 691 tree callee, ap; |
624 | 692 |
625 if (!is_gimple_call (stmt)) | 693 if (!is_gimple_call (stmt)) |
626 continue; | 694 continue; |
627 | 695 |
660 break; | 728 break; |
661 } | 729 } |
662 ap = TREE_OPERAND (ap, 0); | 730 ap = TREE_OPERAND (ap, 0); |
663 } | 731 } |
664 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap)) | 732 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap)) |
665 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl)) | 733 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl)) |
666 || TREE_CODE (ap) != VAR_DECL) | 734 || !VAR_P (ap)) |
667 { | 735 { |
668 va_list_escapes = true; | 736 va_list_escapes = true; |
669 break; | 737 break; |
670 } | 738 } |
671 | 739 |
673 { | 741 { |
674 va_list_escapes = true; | 742 va_list_escapes = true; |
675 break; | 743 break; |
676 } | 744 } |
677 | 745 |
678 bitmap_set_bit (si.va_list_vars, DECL_UID (ap)); | 746 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names); |
679 | 747 |
680 /* VA_START_BB and VA_START_AP will be only used if there is just | 748 /* VA_START_BB and VA_START_AP will be only used if there is just |
681 one va_start in the function. */ | 749 one va_start in the function. */ |
682 si.va_start_bb = bb; | 750 si.va_start_bb = bb; |
683 si.va_start_ap = ap; | 751 si.va_start_ap = ap; |
715 } | 783 } |
716 | 784 |
717 /* For void * or char * va_list there is just one counter | 785 /* For void * or char * va_list there is just one counter |
718 (va_list itself). Use VA_LIST_GPR_SIZE for it. */ | 786 (va_list itself). Use VA_LIST_GPR_SIZE for it. */ |
719 if (va_list_simple_ptr) | 787 if (va_list_simple_ptr) |
720 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; | 788 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; |
721 | 789 |
722 calculate_dominance_info (CDI_DOMINATORS); | 790 calculate_dominance_info (CDI_DOMINATORS); |
723 memset (&wi, 0, sizeof (wi)); | 791 memset (&wi, 0, sizeof (wi)); |
724 wi.info = si.va_list_vars; | 792 wi.info = si.va_list_vars; |
725 | 793 |
726 FOR_EACH_BB (bb) | 794 FOR_EACH_BB_FN (bb, fun) |
727 { | 795 { |
728 gimple_stmt_iterator i; | |
729 | |
730 si.compute_sizes = -1; | 796 si.compute_sizes = -1; |
731 si.bb = bb; | 797 si.bb = bb; |
732 | 798 |
733 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat | 799 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat |
734 them as assignments for the purpose of escape analysis. This is | 800 them as assignments for the purpose of escape analysis. This is |
735 not needed for non-simple va_list because virtual phis don't perform | 801 not needed for non-simple va_list because virtual phis don't perform |
736 any real data movement. */ | 802 any real data movement. Also, check PHI nodes for taking address of |
737 if (va_list_simple_ptr) | 803 the va_list vars. */ |
738 { | 804 tree lhs, rhs; |
739 tree lhs, rhs; | 805 use_operand_p uop; |
740 use_operand_p uop; | 806 ssa_op_iter soi; |
741 ssa_op_iter soi; | 807 |
742 | 808 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i); |
743 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i)) | 809 gsi_next (&i)) |
810 { | |
811 gphi *phi = i.phi (); | |
812 lhs = PHI_RESULT (phi); | |
813 | |
814 if (virtual_operand_p (lhs)) | |
815 continue; | |
816 | |
817 if (va_list_simple_ptr) | |
744 { | 818 { |
745 gimple phi = gsi_stmt (i); | |
746 lhs = PHI_RESULT (phi); | |
747 | |
748 if (!is_gimple_reg (lhs)) | |
749 continue; | |
750 | |
751 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE) | 819 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE) |
752 { | 820 { |
753 rhs = USE_FROM_PTR (uop); | 821 rhs = USE_FROM_PTR (uop); |
754 if (va_list_ptr_read (&si, rhs, lhs)) | 822 if (va_list_ptr_read (&si, rhs, lhs)) |
755 continue; | 823 continue; |
768 } | 836 } |
769 va_list_escapes = true; | 837 va_list_escapes = true; |
770 } | 838 } |
771 } | 839 } |
772 } | 840 } |
773 } | 841 |
774 | 842 for (unsigned j = 0; !va_list_escapes |
775 for (i = gsi_start_bb (bb); | 843 && j < gimple_phi_num_args (phi); ++j) |
844 if ((!va_list_simple_ptr | |
845 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME) | |
846 && walk_tree (gimple_phi_arg_def_ptr (phi, j), | |
847 find_va_list_reference, &wi, NULL)) | |
848 { | |
849 if (dump_file && (dump_flags & TDF_DETAILS)) | |
850 { | |
851 fputs ("va_list escapes in ", dump_file); | |
852 print_gimple_stmt (dump_file, phi, 0, dump_flags); | |
853 fputc ('\n', dump_file); | |
854 } | |
855 va_list_escapes = true; | |
856 } | |
857 } | |
858 | |
859 for (gimple_stmt_iterator i = gsi_start_bb (bb); | |
776 !gsi_end_p (i) && !va_list_escapes; | 860 !gsi_end_p (i) && !va_list_escapes; |
777 gsi_next (&i)) | 861 gsi_next (&i)) |
778 { | 862 { |
779 gimple stmt = gsi_stmt (i); | 863 gimple *stmt = gsi_stmt (i); |
780 | 864 |
781 /* Don't look at __builtin_va_{start,end}, they are ok. */ | 865 /* Don't look at __builtin_va_{start,end}, they are ok. */ |
782 if (is_gimple_call (stmt)) | 866 if (is_gimple_call (stmt)) |
783 { | 867 { |
784 tree callee = gimple_call_fndecl (stmt); | 868 tree callee = gimple_call_fndecl (stmt); |
790 continue; | 874 continue; |
791 } | 875 } |
792 | 876 |
793 if (is_gimple_assign (stmt)) | 877 if (is_gimple_assign (stmt)) |
794 { | 878 { |
795 tree lhs = gimple_assign_lhs (stmt); | 879 lhs = gimple_assign_lhs (stmt); |
796 tree rhs = gimple_assign_rhs1 (stmt); | 880 rhs = gimple_assign_rhs1 (stmt); |
797 | 881 |
798 if (va_list_simple_ptr) | 882 if (va_list_simple_ptr) |
799 { | 883 { |
800 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) | 884 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) |
801 == GIMPLE_SINGLE_RHS) | 885 == GIMPLE_SINGLE_RHS) |
802 { | 886 { |
887 /* Check for ap ={v} {}. */ | |
888 if (TREE_CLOBBER_P (rhs)) | |
889 continue; | |
890 | |
803 /* Check for tem = ap. */ | 891 /* Check for tem = ap. */ |
804 if (va_list_ptr_read (&si, rhs, lhs)) | 892 else if (va_list_ptr_read (&si, rhs, lhs)) |
805 continue; | 893 continue; |
806 | 894 |
807 /* Check for the last insn in: | 895 /* Check for the last insn in: |
808 tem1 = ap; | 896 tem1 = ap; |
809 tem2 = tem1 + CST; | 897 tem2 = tem1 + CST; |
823 else | 911 else |
824 { | 912 { |
825 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) | 913 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) |
826 == GIMPLE_SINGLE_RHS) | 914 == GIMPLE_SINGLE_RHS) |
827 { | 915 { |
916 /* Check for ap ={v} {}. */ | |
917 if (TREE_CLOBBER_P (rhs)) | |
918 continue; | |
919 | |
828 /* Check for ap[0].field = temp. */ | 920 /* Check for ap[0].field = temp. */ |
829 if (va_list_counter_struct_op (&si, lhs, rhs, true)) | 921 else if (va_list_counter_struct_op (&si, lhs, rhs, true)) |
830 continue; | 922 continue; |
831 | 923 |
832 /* Check for temp = ap[0].field. */ | 924 /* Check for temp = ap[0].field. */ |
833 else if (va_list_counter_struct_op (&si, rhs, lhs, | 925 else if (va_list_counter_struct_op (&si, rhs, lhs, |
834 false)) | 926 false)) |
874 va_list_escapes = true; | 966 va_list_escapes = true; |
875 | 967 |
876 finish: | 968 finish: |
877 if (va_list_escapes) | 969 if (va_list_escapes) |
878 { | 970 { |
879 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; | 971 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; |
880 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; | 972 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; |
881 } | 973 } |
882 BITMAP_FREE (si.va_list_vars); | 974 BITMAP_FREE (si.va_list_vars); |
883 BITMAP_FREE (si.va_list_escape_vars); | 975 BITMAP_FREE (si.va_list_escape_vars); |
884 free (si.offsets); | 976 free (si.offsets); |
885 if (dump_file) | 977 if (dump_file) |
886 { | 978 { |
887 fprintf (dump_file, "%s: va_list escapes %d, needs to save ", | 979 fprintf (dump_file, "%s: va_list escapes %d, needs to save ", |
888 funcname, (int) va_list_escapes); | 980 funcname, (int) va_list_escapes); |
889 if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE) | 981 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE) |
890 fputs ("all", dump_file); | 982 fputs ("all", dump_file); |
891 else | 983 else |
892 fprintf (dump_file, "%d", cfun->va_list_gpr_size); | 984 fprintf (dump_file, "%d", cfun->va_list_gpr_size); |
893 fputs (" GPR units and ", dump_file); | 985 fputs (" GPR units and ", dump_file); |
894 if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE) | 986 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE) |
895 fputs ("all", dump_file); | 987 fputs ("all", dump_file); |
896 else | 988 else |
897 fprintf (dump_file, "%d", cfun->va_list_fpr_size); | 989 fprintf (dump_file, "%d", cfun->va_list_fpr_size); |
898 fputs (" FPR units.\n", dump_file); | 990 fputs (" FPR units.\n", dump_file); |
899 } | 991 } |
992 } | |
993 | |
994 /* Expand IFN_VA_ARGs in FUN. */ | |
995 | |
996 static void | |
997 expand_ifn_va_arg_1 (function *fun) | |
998 { | |
999 bool modified = false; | |
1000 basic_block bb; | |
1001 gimple_stmt_iterator i; | |
1002 location_t saved_location; | |
1003 | |
1004 FOR_EACH_BB_FN (bb, fun) | |
1005 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) | |
1006 { | |
1007 gimple *stmt = gsi_stmt (i); | |
1008 tree ap, aptype, expr, lhs, type; | |
1009 gimple_seq pre = NULL, post = NULL; | |
1010 | |
1011 if (!gimple_call_internal_p (stmt, IFN_VA_ARG)) | |
1012 continue; | |
1013 | |
1014 modified = true; | |
1015 | |
1016 type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1))); | |
1017 ap = gimple_call_arg (stmt, 0); | |
1018 aptype = TREE_TYPE (gimple_call_arg (stmt, 2)); | |
1019 gcc_assert (POINTER_TYPE_P (aptype)); | |
1020 | |
1021 /* Balance out the &ap, usually added by build_va_arg. */ |
1022 ap = build2 (MEM_REF, TREE_TYPE (aptype), ap, | |
1023 build_int_cst (aptype, 0)); | |
1024 | |
1025 push_gimplify_context (false); | |
1026 saved_location = input_location; | |
1027 input_location = gimple_location (stmt); | |
1028 | |
1029 /* Make it easier for the backends by protecting the valist argument | |
1030 from multiple evaluations. */ | |
1031 gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue); | |
1032 | |
1033 expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post); | |
1034 | |
1035 lhs = gimple_call_lhs (stmt); | |
1036 if (lhs != NULL_TREE) | |
1037 { | |
1038 unsigned int nargs = gimple_call_num_args (stmt); | |
1039 gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type)); | |
1040 | |
1041 /* We replace call with a new expr. This may require | |
1042 corresponding bndret call fixup. */ | |
1043 if (chkp_function_instrumented_p (fun->decl)) | |
1044 chkp_fixup_inlined_call (lhs, expr); | |
1045 | |
1046 if (nargs == 4) | |
1047 { | |
1048 /* We've transported the size with WITH_SIZE_EXPR here as |
1049 the last argument of the internal fn call. Now reinstate | |
1050 it. */ | |
1051 tree size = gimple_call_arg (stmt, nargs - 1); | |
1052 expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size); | |
1053 } | |
1054 | |
1055 /* We use gimplify_assign here, rather than gimple_build_assign, | |
1056 because gimple_assign knows how to deal with variable-sized | |
1057 types. */ | |
1058 gimplify_assign (lhs, expr, &pre); | |
1059 } | |
1060 else | |
1061 gimplify_and_add (expr, &pre); | |
1062 | |
1063 input_location = saved_location; | |
1064 pop_gimplify_context (NULL); | |
1065 | |
1066 gimple_seq_add_seq (&pre, post); | |
1067 update_modified_stmts (pre); | |
1068 | |
1069 /* Add the sequence after IFN_VA_ARG. This splits the bb right | |
1070 after IFN_VA_ARG, and adds the sequence in one or more new bbs | |
1071 in between. */ |
1072 gimple_find_sub_bbs (pre, &i); | |
1073 | |
1074 /* Remove the IFN_VA_ARG gimple_call. It's the last stmt in the | |
1075 bb. */ | |
1076 unlink_stmt_vdef (stmt); | |
1077 release_ssa_name_fn (fun, gimple_vdef (stmt)); | |
1078 gsi_remove (&i, true); | |
1079 gcc_assert (gsi_end_p (i)); | |
1080 | |
1081 /* We're walking here into the bbs which contain the expansion of | |
1082 IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs | |
1083 expanding. We could try to skip walking these bbs, perhaps by | |
1084 walking backwards over gimples and bbs. */ | |
1085 break; | |
1086 } | |
1087 | |
1088 if (!modified) | |
1089 return; | |
1090 | |
1091 free_dominance_info (CDI_DOMINATORS); | |
1092 update_ssa (TODO_update_ssa); | |
1093 } | |
1094 | |
1095 /* Expand IFN_VA_ARGs in FUN, if necessary. */ | |
1096 | |
1097 static void | |
1098 expand_ifn_va_arg (function *fun) | |
1099 { | |
1100 if ((fun->curr_properties & PROP_gimple_lva) == 0) | |
1101 expand_ifn_va_arg_1 (fun); | |
1102 | |
1103 if (flag_checking) | |
1104 { | |
1105 basic_block bb; | |
1106 gimple_stmt_iterator i; | |
1107 FOR_EACH_BB_FN (bb, fun) | |
1108 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) | |
1109 gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG)); | |
1110 } | |
1111 } | |
1112 | |
1113 namespace { | |
1114 | |
1115 const pass_data pass_data_stdarg = | |
1116 { | |
1117 GIMPLE_PASS, /* type */ | |
1118 "stdarg", /* name */ | |
1119 OPTGROUP_NONE, /* optinfo_flags */ | |
1120 TV_NONE, /* tv_id */ | |
1121 ( PROP_cfg | PROP_ssa ), /* properties_required */ | |
1122 PROP_gimple_lva, /* properties_provided */ | |
1123 0, /* properties_destroyed */ | |
1124 0, /* todo_flags_start */ | |
1125 0, /* todo_flags_finish */ | |
1126 }; | |
1127 | |
1128 class pass_stdarg : public gimple_opt_pass | |
1129 { | |
1130 public: | |
1131 pass_stdarg (gcc::context *ctxt) | |
1132 : gimple_opt_pass (pass_data_stdarg, ctxt) | |
1133 {} | |
1134 | |
1135 /* opt_pass methods: */ | |
1136 virtual bool gate (function *) | |
1137 { | |
1138 /* Always run this pass, in order to expand va_arg internal_fns. We | |
1139 also need to do that if fun->stdarg == 0, because a va_arg may also | |
1140 occur in a function without varargs, e.g. when passing a va_list to |
1141 another function. */ | |
1142 return true; | |
1143 } | |
1144 | |
1145 virtual unsigned int execute (function *); | |
1146 | |
1147 }; // class pass_stdarg | |
1148 | |
1149 unsigned int | |
1150 pass_stdarg::execute (function *fun) | |
1151 { | |
1152 /* TODO: Postpone expand_ifn_va_arg till after | |
1153 optimize_va_list_gpr_fpr_size. */ | |
1154 expand_ifn_va_arg (fun); | |
1155 | |
1156 if (flag_stdarg_opt | |
1157 /* This optimization is only for stdarg functions. */ | |
1158 && fun->stdarg != 0) | |
1159 optimize_va_list_gpr_fpr_size (fun); | |
1160 | |
900 return 0; | 1161 return 0; |
901 } | 1162 } |
902 | 1163 |
903 | 1164 } // anon namespace |
904 struct gimple_opt_pass pass_stdarg = | 1165 |
905 { | 1166 gimple_opt_pass * |
906 { | 1167 make_pass_stdarg (gcc::context *ctxt) |
907 GIMPLE_PASS, | 1168 { |
908 "stdarg", /* name */ | 1169 return new pass_stdarg (ctxt); |
909 gate_optimize_stdarg, /* gate */ | 1170 } |
910 execute_optimize_stdarg, /* execute */ | 1171 |
911 NULL, /* sub */ | 1172 namespace { |
912 NULL, /* next */ | 1173 |
913 0, /* static_pass_number */ | 1174 const pass_data pass_data_lower_vaarg = |
914 TV_NONE, /* tv_id */ | 1175 { |
915 PROP_cfg | PROP_ssa, /* properties_required */ | 1176 GIMPLE_PASS, /* type */ |
916 0, /* properties_provided */ | 1177 "lower_vaarg", /* name */ |
917 0, /* properties_destroyed */ | 1178 OPTGROUP_NONE, /* optinfo_flags */ |
918 0, /* todo_flags_start */ | 1179 TV_NONE, /* tv_id */ |
919 TODO_dump_func /* todo_flags_finish */ | 1180 ( PROP_cfg | PROP_ssa ), /* properties_required */ |
920 } | 1181 PROP_gimple_lva, /* properties_provided */ |
1182 0, /* properties_destroyed */ | |
1183 0, /* todo_flags_start */ | |
1184 0, /* todo_flags_finish */ | |
921 }; | 1185 }; |
1186 | |
1187 class pass_lower_vaarg : public gimple_opt_pass | |
1188 { | |
1189 public: | |
1190 pass_lower_vaarg (gcc::context *ctxt) | |
1191 : gimple_opt_pass (pass_data_lower_vaarg, ctxt) | |
1192 {} | |
1193 | |
1194 /* opt_pass methods: */ | |
1195 virtual bool gate (function *) | |
1196 { | |
1197 return (cfun->curr_properties & PROP_gimple_lva) == 0; | |
1198 } | |
1199 | |
1200 virtual unsigned int execute (function *); | |
1201 | |
1202 }; // class pass_lower_vaarg | |
1203 | |
1204 unsigned int | |
1205 pass_lower_vaarg::execute (function *fun) | |
1206 { | |
1207 expand_ifn_va_arg (fun); | |
1208 return 0; | |
1209 } | |
1210 | |
1211 } // anon namespace | |
1212 | |
1213 gimple_opt_pass * | |
1214 make_pass_lower_vaarg (gcc::context *ctxt) | |
1215 { | |
1216 return new pass_lower_vaarg (ctxt); | |
1217 } |
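For contrast with the example above, a hedged sketch of the escape case that the `check_all_va_list_escapes` machinery guards against: once a va_list is handed to another function, the pass cannot bound the register reads and falls back to `VA_LIST_MAX_GPR_SIZE` / `VA_LIST_MAX_FPR_SIZE`, as in the `finish:` block of `optimize_va_list_gpr_fpr_size`. The function name `log_message` and the example are illustrative assumptions, not from the changeset:

```c
#include <stdarg.h>
#include <stdio.h>

/* The va_list is passed to vfprintf, so the pass cannot see which
   registers the callee will read; the va_list escapes and every
   GPR and FPR save slot is conservatively kept.  */
void
log_message (const char *fmt, ...)
{
  va_list ap;

  va_start (ap, fmt);
  vfprintf (stderr, fmt, ap);   /* ap escapes here */
  va_end (ap);
}
```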