comparison gcc/explow.c @ 111:04ced10e8804
gcc 7
author      kono
date        Fri, 27 Oct 2017 22:46:09 +0900
parents     f6334be47118
children    84e7813d76e9
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* Subroutines for manipulating rtx's in semantically interesting ways. | 1 /* Subroutines for manipulating rtx's in semantically interesting ways. |
2 Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, | 2 Copyright (C) 1987-2017 Free Software Foundation, Inc. |
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 | |
4 Free Software Foundation, Inc. | |
5 | 3 |
6 This file is part of GCC. | 4 This file is part of GCC. |
7 | 5 |
8 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
9 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
21 | 19 |
22 | 20 |
23 #include "config.h" | 21 #include "config.h" |
24 #include "system.h" | 22 #include "system.h" |
25 #include "coretypes.h" | 23 #include "coretypes.h" |
26 #include "tm.h" | 24 #include "target.h" |
27 #include "diagnostic-core.h" | 25 #include "function.h" |
28 #include "rtl.h" | 26 #include "rtl.h" |
29 #include "tree.h" | 27 #include "tree.h" |
28 #include "memmodel.h" | |
30 #include "tm_p.h" | 29 #include "tm_p.h" |
31 #include "flags.h" | 30 #include "expmed.h" |
31 #include "profile-count.h" | |
32 #include "optabs.h" | |
33 #include "emit-rtl.h" | |
34 #include "recog.h" | |
35 #include "diagnostic-core.h" | |
36 #include "stor-layout.h" | |
32 #include "except.h" | 37 #include "except.h" |
33 #include "function.h" | 38 #include "dojump.h" |
39 #include "explow.h" | |
34 #include "expr.h" | 40 #include "expr.h" |
35 #include "optabs.h" | 41 #include "common/common-target.h" |
36 #include "libfuncs.h" | |
37 #include "hard-reg-set.h" | |
38 #include "insn-config.h" | |
39 #include "ggc.h" | |
40 #include "recog.h" | |
41 #include "langhooks.h" | |
42 #include "target.h" | |
43 #include "output.h" | 42 #include "output.h" |
43 #include "params.h" | |
44 | 44 |
45 static rtx break_out_memory_refs (rtx); | 45 static rtx break_out_memory_refs (rtx); |
46 static void anti_adjust_stack_and_probe_stack_clash (rtx); | |
46 | 47 |
47 | 48 |
48 /* Truncate and perhaps sign-extend C as appropriate for MODE. */ | 49 /* Truncate and perhaps sign-extend C as appropriate for MODE. */ |
49 | 50 |
50 HOST_WIDE_INT | 51 HOST_WIDE_INT |
51 trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode) | 52 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode) |
52 { | 53 { |
53 int width = GET_MODE_BITSIZE (mode); | 54 /* Not scalar_int_mode because we also allow pointer bound modes. */ |
55 scalar_mode smode = as_a <scalar_mode> (mode); | |
56 int width = GET_MODE_PRECISION (smode); | |
54 | 57 |
55 /* You want to truncate to a _what_? */ | 58 /* You want to truncate to a _what_? */ |
56 gcc_assert (SCALAR_INT_MODE_P (mode)); | 59 gcc_assert (SCALAR_INT_MODE_P (mode) |
60 || POINTER_BOUNDS_MODE_P (mode)); | |
57 | 61 |
58 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */ | 62 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */ |
59 if (mode == BImode) | 63 if (smode == BImode) |
60 return c & 1 ? STORE_FLAG_VALUE : 0; | 64 return c & 1 ? STORE_FLAG_VALUE : 0; |
61 | 65 |
62 /* Sign-extend for the requested mode. */ | 66 /* Sign-extend for the requested mode. */ |
63 | 67 |
64 if (width < HOST_BITS_PER_WIDE_INT) | 68 if (width < HOST_BITS_PER_WIDE_INT) |
71 } | 75 } |
72 | 76 |
73 return c; | 77 return c; |
74 } | 78 } |
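As a side note on the arithmetic this function performs (unchanged by the switch from GET_MODE_BITSIZE to GET_MODE_PRECISION above), here is a minimal standalone sketch of truncate-and-sign-extend, assuming a 64-bit HOST_WIDE_INT and 0 < width < 64; the function name is illustrative, not from GCC.

    /* Illustration only: keep the WIDTH low bits of C, then sign-extend,
       mirroring the width < HOST_BITS_PER_WIDE_INT branch above.  */
    static long long
    sign_extend_to_width (long long c, int width)
    {
      unsigned long long mask = (1ULL << width) - 1;
      c &= mask;                      /* truncate to WIDTH bits */
      if (c & (1ULL << (width - 1)))  /* sign bit of the field set?  */
        c |= ~mask;                   /* extend with one bits */
      return c;
    }
    /* sign_extend_to_width (0xFF, 8) == -1;
       sign_extend_to_width (0x7F, 8) == 127.  */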
75 | 79 |
76 /* Return an rtx for the sum of X and the integer C. */ | 80 /* Return an rtx for the sum of X and the integer C, given that X has |
81 mode MODE. INPLACE is true if X can be modified inplace or false | |
82 if it must be treated as immutable. */ | |
77 | 83 |
78 rtx | 84 rtx |
79 plus_constant (rtx x, HOST_WIDE_INT c) | 85 plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c, |
86 bool inplace) | |
80 { | 87 { |
81 RTX_CODE code; | 88 RTX_CODE code; |
82 rtx y; | 89 rtx y; |
83 enum machine_mode mode; | |
84 rtx tem; | 90 rtx tem; |
85 int all_constant = 0; | 91 int all_constant = 0; |
86 | 92 |
93 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode); | |
94 | |
87 if (c == 0) | 95 if (c == 0) |
88 return x; | 96 return x; |
89 | 97 |
90 restart: | 98 restart: |
91 | 99 |
92 code = GET_CODE (x); | 100 code = GET_CODE (x); |
93 mode = GET_MODE (x); | |
94 y = x; | 101 y = x; |
95 | 102 |
96 switch (code) | 103 switch (code) |
97 { | 104 { |
98 case CONST_INT: | 105 CASE_CONST_SCALAR_INT: |
99 return GEN_INT (INTVAL (x) + c); | 106 return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode); |
100 | |
101 case CONST_DOUBLE: | |
102 { | |
103 unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x); | |
104 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x); | |
105 unsigned HOST_WIDE_INT l2 = c; | |
106 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0; | |
107 unsigned HOST_WIDE_INT lv; | |
108 HOST_WIDE_INT hv; | |
109 | |
110 add_double (l1, h1, l2, h2, &lv, &hv); | |
111 | |
112 return immed_double_const (lv, hv, VOIDmode); | |
113 } | |
114 | |
115 case MEM: | 107 case MEM: |
116 /* If this is a reference to the constant pool, try replacing it with | 108 /* If this is a reference to the constant pool, try replacing it with |
117 a reference to a new constant. If the resulting address isn't | 109 a reference to a new constant. If the resulting address isn't |
118 valid, don't return it because we have no way to validize it. */ | 110 valid, don't return it because we have no way to validize it. */ |
119 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | 111 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF |
120 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))) | 112 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))) |
121 { | 113 { |
122 tem | 114 rtx cst = get_pool_constant (XEXP (x, 0)); |
123 = force_const_mem (GET_MODE (x), | 115 |
124 plus_constant (get_pool_constant (XEXP (x, 0)), | 116 if (GET_CODE (cst) == CONST_VECTOR |
125 c)); | 117 && GET_MODE_INNER (GET_MODE (cst)) == mode) |
126 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0))) | 118 { |
127 return tem; | 119 cst = gen_lowpart (mode, cst); |
120 gcc_assert (cst); | |
121 } | |
122 if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode) | |
123 { | |
124 tem = plus_constant (mode, cst, c); | |
125 tem = force_const_mem (GET_MODE (x), tem); | |
126 /* Targets may disallow some constants in the constant pool, thus | |
127 force_const_mem may return NULL_RTX. */ | |
128 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0))) | |
129 return tem; | |
130 } | |
128 } | 131 } |
129 break; | 132 break; |
130 | 133 |
131 case CONST: | 134 case CONST: |
132 /* If adding to something entirely constant, set a flag | 135 /* If adding to something entirely constant, set a flag |
133 so that we can add a CONST around the result. */ | 136 so that we can add a CONST around the result. */ |
137 if (inplace && shared_const_p (x)) | |
138 inplace = false; | |
134 x = XEXP (x, 0); | 139 x = XEXP (x, 0); |
135 all_constant = 1; | 140 all_constant = 1; |
136 goto restart; | 141 goto restart; |
137 | 142 |
138 case SYMBOL_REF: | 143 case SYMBOL_REF: |
139 case LABEL_REF: | 144 case LABEL_REF: |
140 all_constant = 1; | 145 all_constant = 1; |
141 break; | 146 break; |
142 | 147 |
143 case PLUS: | 148 case PLUS: |
144 /* The interesting case is adding the integer to a sum. | 149 /* The interesting case is adding the integer to a sum. Look |
145 Look for constant term in the sum and combine | 150 for constant term in the sum and combine with C. For an |
146 with C. For an integer constant term, we make a combined | 151 integer constant term or a constant term that is not an |
147 integer. For a constant term that is not an explicit integer, | 152 explicit integer, we combine or group them together anyway. |
148 we cannot really combine, but group them together anyway. | |
149 | |
150 Restart or use a recursive call in case the remaining operand is | |
151 something that we handle specially, such as a SYMBOL_REF. | |
152 | 153 |
153 We may not immediately return from the recursive call here, lest | 154 We may not immediately return from the recursive call here, lest |
154 all_constant gets lost. */ | 155 all_constant gets lost. */ |
155 | 156 |
156 if (CONST_INT_P (XEXP (x, 1))) | 157 if (CONSTANT_P (XEXP (x, 1))) |
157 { | 158 { |
158 c += INTVAL (XEXP (x, 1)); | 159 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace); |
159 | 160 if (term == const0_rtx) |
160 if (GET_MODE (x) != VOIDmode) | 161 x = XEXP (x, 0); |
161 c = trunc_int_for_mode (c, GET_MODE (x)); | 162 else if (inplace) |
162 | 163 XEXP (x, 1) = term; |
163 x = XEXP (x, 0); | 164 else |
164 goto restart; | 165 x = gen_rtx_PLUS (mode, XEXP (x, 0), term); |
165 } | |
166 else if (CONSTANT_P (XEXP (x, 1))) | |
167 { | |
168 x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c)); | |
169 c = 0; | 166 c = 0; |
170 } | 167 } |
171 else if (find_constant_term_loc (&y)) | 168 else if (rtx *const_loc = find_constant_term_loc (&y)) |
172 { | 169 { |
173 /* We need to be careful since X may be shared and we can't | 170 if (!inplace) |
174 modify it in place. */ | 171 { |
175 rtx copy = copy_rtx (x); | 172 /* We need to be careful since X may be shared and we can't |
176 rtx *const_loc = find_constant_term_loc (&copy); | 174 x = copy_rtx (x); |
177 | 174 x = copy_rtx (x); |
178 *const_loc = plus_constant (*const_loc, c); | 175 const_loc = find_constant_term_loc (&x); |
179 x = copy; | 176 } |
177 *const_loc = plus_constant (mode, *const_loc, c, true); | |
180 c = 0; | 178 c = 0; |
181 } | 179 } |
182 break; | 180 break; |
183 | 181 |
184 default: | 182 default: |
185 break; | 183 break; |
186 } | 184 } |
187 | 185 |
188 if (c != 0) | 186 if (c != 0) |
189 x = gen_rtx_PLUS (mode, x, GEN_INT (c)); | 187 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode)); |
190 | 188 |
191 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) | 189 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) |
192 return x; | 190 return x; |
193 else if (all_constant) | 191 else if (all_constant) |
194 return gen_rtx_CONST (mode, x); | 192 return gen_rtx_CONST (mode, x); |
233 } | 231 } |
234 | 232 |
235 return x; | 233 return x; |
236 } | 234 } |
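A hedged usage sketch of the revised interface: the explicit MODE argument covers VOIDmode constants such as CONST_INT, and INPLACE says whether X may be mutated. (Call sites in this diff pass three arguments, so a default of false for INPLACE is assumed here; the declaration itself is not part of this hunk.)

    /* Form BASE + 16 without mutating a possibly-shared BASE.  */
    rtx addr = plus_constant (Pmode, base, 16);
    /* ADDR is freshly built and unshared here, so in-place update is safe.  */
    addr = plus_constant (Pmode, addr, -8, true);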
237 | 235 |
238 /* Return an rtx for the size in bytes of the value of EXP. */ | |
239 | |
240 rtx | |
241 expr_size (tree exp) | |
242 { | |
243 tree size; | |
244 | |
245 if (TREE_CODE (exp) == WITH_SIZE_EXPR) | |
246 size = TREE_OPERAND (exp, 1); | |
247 else | |
248 { | |
249 size = tree_expr_size (exp); | |
250 gcc_assert (size); | |
251 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp)); | |
252 } | |
253 | |
254 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL); | |
255 } | |
256 | |
257 /* Return a wide integer for the size in bytes of the value of EXP, or -1 | |
258 if the size can vary or is larger than an integer. */ | |
259 | |
260 HOST_WIDE_INT | |
261 int_expr_size (tree exp) | |
262 { | |
263 tree size; | |
264 | |
265 if (TREE_CODE (exp) == WITH_SIZE_EXPR) | |
266 size = TREE_OPERAND (exp, 1); | |
267 else | |
268 { | |
269 size = tree_expr_size (exp); | |
270 gcc_assert (size); | |
271 } | |
272 | |
273 if (size == 0 || !host_integerp (size, 0)) | |
274 return -1; | |
275 | |
276 return tree_low_cst (size, 0); | |
277 } | |
278 | 236 |
279 /* Return a copy of X in which all memory references | 237 /* Return a copy of X in which all memory references |
280 and all constants that involve symbol refs | 238 and all constants that involve symbol refs |
281 have been replaced with new temporary registers. | 239 have been replaced with new temporary registers. |
282 Also emit code to load the memory locations and constants | 240 Also emit code to load the memory locations and constants |
314 | 272 |
315 /* Given X, a memory address in address space AS' pointer mode, convert it to | 273 /* Given X, a memory address in address space AS' pointer mode, convert it to |
316 an address in the address space's address mode, or vice versa (TO_MODE says | 274 an address in the address space's address mode, or vice versa (TO_MODE says |
317 which way). We take advantage of the fact that pointers are not allowed to | 275 which way). We take advantage of the fact that pointers are not allowed to |
318 overflow by commuting arithmetic operations over conversions so that address | 276 overflow by commuting arithmetic operations over conversions so that address |
319 arithmetic insns can be used. */ | 277 arithmetic insns can be used. IN_CONST is true if this conversion is inside |
278 a CONST. NO_EMIT is true if no insns should be emitted, and instead | |
279 it should return NULL if it can't be simplified without emitting insns. */ | |
320 | 280 |
321 rtx | 281 rtx |
322 convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED, | 282 convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED, |
323 rtx x, addr_space_t as ATTRIBUTE_UNUSED) | 283 rtx x, addr_space_t as ATTRIBUTE_UNUSED, |
284 bool in_const ATTRIBUTE_UNUSED, | |
285 bool no_emit ATTRIBUTE_UNUSED) | |
324 { | 286 { |
325 #ifndef POINTERS_EXTEND_UNSIGNED | 287 #ifndef POINTERS_EXTEND_UNSIGNED |
326 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode); | 288 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode); |
327 return x; | 289 return x; |
328 #else /* defined(POINTERS_EXTEND_UNSIGNED) */ | 290 #else /* defined(POINTERS_EXTEND_UNSIGNED) */ |
329 enum machine_mode pointer_mode, address_mode, from_mode; | 291 scalar_int_mode pointer_mode, address_mode, from_mode; |
330 rtx temp; | 292 rtx temp; |
331 enum rtx_code code; | 293 enum rtx_code code; |
332 | 294 |
333 /* If X already has the right mode, just return it. */ | 295 /* If X already has the right mode, just return it. */ |
334 if (GET_MODE (x) == to_mode) | 296 if (GET_MODE (x) == to_mode) |
340 | 302 |
341 /* Here we handle some special cases. If none of them apply, fall through | 303 /* Here we handle some special cases. If none of them apply, fall through |
342 to the default case. */ | 304 to the default case. */ |
343 switch (GET_CODE (x)) | 305 switch (GET_CODE (x)) |
344 { | 306 { |
345 case CONST_INT: | 307 CASE_CONST_SCALAR_INT: |
346 case CONST_DOUBLE: | |
347 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)) | 308 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)) |
348 code = TRUNCATE; | 309 code = TRUNCATE; |
349 else if (POINTERS_EXTEND_UNSIGNED < 0) | 310 else if (POINTERS_EXTEND_UNSIGNED < 0) |
350 break; | 311 break; |
351 else if (POINTERS_EXTEND_UNSIGNED > 0) | 312 else if (POINTERS_EXTEND_UNSIGNED > 0) |
362 && GET_MODE (SUBREG_REG (x)) == to_mode) | 323 && GET_MODE (SUBREG_REG (x)) == to_mode) |
363 return SUBREG_REG (x); | 324 return SUBREG_REG (x); |
364 break; | 325 break; |
365 | 326 |
366 case LABEL_REF: | 327 case LABEL_REF: |
367 temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0)); | 328 temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x)); |
368 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x); | 329 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x); |
369 return temp; | 330 return temp; |
370 break; | |
371 | 331 |
372 case SYMBOL_REF: | 332 case SYMBOL_REF: |
373 temp = shallow_copy_rtx (x); | 333 temp = shallow_copy_rtx (x); |
374 PUT_MODE (temp, to_mode); | 334 PUT_MODE (temp, to_mode); |
375 return temp; | 335 return temp; |
376 break; | |
377 | 336 |
378 case CONST: | 337 case CONST: |
379 return gen_rtx_CONST (to_mode, | 338 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as, |
380 convert_memory_address_addr_space | 339 true, no_emit); |
381 (to_mode, XEXP (x, 0), as)); | 340 return temp ? gen_rtx_CONST (to_mode, temp) : temp; |
382 break; | |
383 | 341 |
384 case PLUS: | 342 case PLUS: |
385 case MULT: | 343 case MULT: |
386 /* For addition we can safely permute the conversion and addition | 344 /* For addition we can safely permute the conversion and addition |
387 operation if one operand is a constant and converting the constant | 345 operation if one operand is a constant and converting the constant |
388 does not change it or if one operand is a constant and we are | 346 does not change it or if one operand is a constant and we are |
389 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0). | 347 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0). |
390 We can always safely permute them if we are making the address | 348 We can always safely permute them if we are making the address |
391 narrower. */ | 349 narrower. Inside a CONST RTL, this is safe for both pointers |
350 zero or sign extended as pointers cannot wrap. */ | |
392 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode) | 351 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode) |
393 || (GET_CODE (x) == PLUS | 352 || (GET_CODE (x) == PLUS |
394 && CONST_INT_P (XEXP (x, 1)) | 353 && CONST_INT_P (XEXP (x, 1)) |
395 && (XEXP (x, 1) == convert_memory_address_addr_space | 354 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0) |
396 (to_mode, XEXP (x, 1), as) | 355 || XEXP (x, 1) == convert_memory_address_addr_space_1 |
397 || POINTERS_EXTEND_UNSIGNED < 0))) | 356 (to_mode, XEXP (x, 1), as, in_const, |
398 return gen_rtx_fmt_ee (GET_CODE (x), to_mode, | 357 no_emit) |
399 convert_memory_address_addr_space | 358 || POINTERS_EXTEND_UNSIGNED < 0))) |
400 (to_mode, XEXP (x, 0), as), | 359 { |
401 XEXP (x, 1)); | 360 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), |
361 as, in_const, no_emit); | |
362 return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode, | |
363 temp, XEXP (x, 1)) | |
364 : temp); | |
365 } | |
402 break; | 366 break; |
403 | 367 |
404 default: | 368 default: |
405 break; | 369 break; |
406 } | 370 } |
371 | |
372 if (no_emit) | |
373 return NULL_RTX; | |
407 | 374 |
408 return convert_modes (to_mode, from_mode, | 375 return convert_modes (to_mode, from_mode, |
409 x, POINTERS_EXTEND_UNSIGNED); | 376 x, POINTERS_EXTEND_UNSIGNED); |
410 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */ | 377 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */ |
411 } | 378 } |
379 | |
380 /* Given X, a memory address in address space AS' pointer mode, convert it to | |
381 an address in the address space's address mode, or vice versa (TO_MODE says | |
382 which way). We take advantage of the fact that pointers are not allowed to | |
383 overflow by commuting arithmetic operations over conversions so that address | |
384 arithmetic insns can be used. */ | |
385 | |
386 rtx | |
387 convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x, | |
388 addr_space_t as) | |
389 { | |
390 return convert_memory_address_addr_space_1 (to_mode, x, as, false, false); | |
391 } | |
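A usage sketch for the wrapper, under the assumption of a target whose pointer mode is narrower than its address mode (the POINTERS_EXTEND_UNSIGNED case); ADDR_SPACE_GENERIC is the generic address space constant from GCC's headers, and NARROW_ADDR is a hypothetical ptr_mode rtx.

    /* Widen a ptr_mode address to the address space's address mode before
       forming a MEM; unlike the no_emit path of the helper, this call may
       emit conversion insns.  */
    rtx wide = convert_memory_address_addr_space (Pmode, narrow_addr,
                                                  ADDR_SPACE_GENERIC);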
412 | 392 |
393 | |
413 /* Return something equivalent to X but valid as a memory address for something | 394 /* Return something equivalent to X but valid as a memory address for something |
414 of mode MODE in the named address space AS. When X is not itself valid, | 395 of mode MODE in the named address space AS. When X is not itself valid, |
415 this works by copying X or subexpressions of it into registers. */ | 396 this works by copying X or subexpressions of it into registers. */ |
416 | 397 |
417 rtx | 398 rtx |
418 memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as) | 399 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as) |
419 { | 400 { |
420 rtx oldx = x; | 401 rtx oldx = x; |
421 enum machine_mode address_mode = targetm.addr_space.address_mode (as); | 402 scalar_int_mode address_mode = targetm.addr_space.address_mode (as); |
422 | 403 |
423 x = convert_memory_address_addr_space (address_mode, x, as); | 404 x = convert_memory_address_addr_space (address_mode, x, as); |
424 | 405 |
425 /* By passing constant addresses through registers | 406 /* By passing constant addresses through registers |
426 we get a chance to cse them. */ | 407 we get a chance to cse them. */ |
544 rtx | 525 rtx |
545 use_anchored_address (rtx x) | 526 use_anchored_address (rtx x) |
546 { | 527 { |
547 rtx base; | 528 rtx base; |
548 HOST_WIDE_INT offset; | 529 HOST_WIDE_INT offset; |
530 machine_mode mode; | |
549 | 531 |
550 if (!flag_section_anchors) | 532 if (!flag_section_anchors) |
551 return x; | 533 return x; |
552 | 534 |
553 if (!MEM_P (x)) | 535 if (!MEM_P (x)) |
584 offset -= SYMBOL_REF_BLOCK_OFFSET (base); | 566 offset -= SYMBOL_REF_BLOCK_OFFSET (base); |
585 | 567 |
586 /* If we're going to run a CSE pass, force the anchor into a register. | 568 /* If we're going to run a CSE pass, force the anchor into a register. |
587 We will then be able to reuse registers for several accesses, if the | 569 We will then be able to reuse registers for several accesses, if the |
588 target costs say that that's worthwhile. */ | 570 target costs say that that's worthwhile. */ |
571 mode = GET_MODE (base); | |
589 if (!cse_not_expected) | 572 if (!cse_not_expected) |
590 base = force_reg (GET_MODE (base), base); | 573 base = force_reg (mode, base); |
591 | 574 |
592 return replace_equiv_address (x, plus_constant (base, offset)); | 575 return replace_equiv_address (x, plus_constant (mode, base, offset)); |
593 } | 576 } |
594 | 577 |
595 /* Copy the value or contents of X to a new temp reg and return that reg. */ | 578 /* Copy the value or contents of X to a new temp reg and return that reg. */ |
596 | 579 |
597 rtx | 580 rtx |
621 | 604 |
622 /* Like copy_to_reg but always give the new register mode MODE | 605 /* Like copy_to_reg but always give the new register mode MODE |
623 in case X is a constant. */ | 606 in case X is a constant. */ |
624 | 607 |
625 rtx | 608 rtx |
626 copy_to_mode_reg (enum machine_mode mode, rtx x) | 609 copy_to_mode_reg (machine_mode mode, rtx x) |
627 { | 610 { |
628 rtx temp = gen_reg_rtx (mode); | 611 rtx temp = gen_reg_rtx (mode); |
629 | 612 |
630 /* If not an operand, must be an address with PLUS and MULT so | 613 /* If not an operand, must be an address with PLUS and MULT so |
631 do the computation. */ | 614 do the computation. */ |
645 | 628 |
646 The caller must not alter the value in the register we return, | 629 The caller must not alter the value in the register we return, |
647 since we mark it as a "constant" register. */ | 630 since we mark it as a "constant" register. */ |
648 | 631 |
649 rtx | 632 rtx |
650 force_reg (enum machine_mode mode, rtx x) | 633 force_reg (machine_mode mode, rtx x) |
651 { | 634 { |
652 rtx temp, insn, set; | 635 rtx temp, set; |
636 rtx_insn *insn; | |
653 | 637 |
654 if (REG_P (x)) | 638 if (REG_P (x)) |
655 return x; | 639 return x; |
656 | 640 |
657 if (general_operand (x, mode)) | 641 if (general_operand (x, mode)) |
745 /* Copy X to TARGET (if it's nonzero and a reg) | 729 /* Copy X to TARGET (if it's nonzero and a reg) |
746 or to a new temp reg and return that reg. | 730 or to a new temp reg and return that reg. |
747 MODE is the mode to use for X in case it is a constant. */ | 731 MODE is the mode to use for X in case it is a constant. */ |
748 | 732 |
749 rtx | 733 rtx |
750 copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode) | 734 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode) |
751 { | 735 { |
752 rtx temp; | 736 rtx temp; |
753 | 737 |
754 if (target && REG_P (target)) | 738 if (target && REG_P (target)) |
755 temp = target; | 739 temp = target; |
765 to show what signedness to use on extension operations. | 749 to show what signedness to use on extension operations. |
766 | 750 |
767 FOR_RETURN is nonzero if the caller is promoting the return value | 751 FOR_RETURN is nonzero if the caller is promoting the return value |
768 of FNDECL, else it is for promoting args. */ | 752 of FNDECL, else it is for promoting args. */ |
769 | 753 |
770 enum machine_mode | 754 machine_mode |
771 promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp, | 755 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp, |
772 const_tree funtype, int for_return) | 756 const_tree funtype, int for_return) |
773 { | 757 { |
758 /* Called without a type node for a libcall. */ | |
759 if (type == NULL_TREE) | |
760 { | |
761 if (INTEGRAL_MODE_P (mode)) | |
762 return targetm.calls.promote_function_mode (NULL_TREE, mode, | |
763 punsignedp, funtype, | |
764 for_return); | |
765 else | |
766 return mode; | |
767 } | |
768 | |
774 switch (TREE_CODE (type)) | 769 switch (TREE_CODE (type)) |
775 { | 770 { |
776 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: | 771 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
777 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE: | 772 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE: |
778 case POINTER_TYPE: case REFERENCE_TYPE: | 773 case POINTER_TYPE: case REFERENCE_TYPE: |
785 } | 780 } |
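The NULL_TREE path added above serves libcall expansion, which has no type node. A hedged sketch of such a call (the mode and argument values are illustrative only):

    /* Promote an SImode libcall argument: no TYPE and no FUNTYPE available;
       per the guard above, only integral modes reach the target hook.  */
    int unsignedp = 0;
    machine_mode pmode = promote_function_mode (NULL_TREE, SImode, &unsignedp,
                                                NULL_TREE, /*for_return=*/0);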
786 /* Return the mode to use to store a scalar of TYPE and MODE. | 781 /* Return the mode to use to store a scalar of TYPE and MODE. |
787 PUNSIGNEDP points to the signedness of the type and may be adjusted | 782 PUNSIGNEDP points to the signedness of the type and may be adjusted |
788 to show what signedness to use on extension operations. */ | 783 to show what signedness to use on extension operations. */ |
789 | 784 |
790 enum machine_mode | 785 machine_mode |
791 promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode, | 786 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode, |
792 int *punsignedp ATTRIBUTE_UNUSED) | 787 int *punsignedp ATTRIBUTE_UNUSED) |
793 { | 788 { |
789 #ifdef PROMOTE_MODE | |
790 enum tree_code code; | |
791 int unsignedp; | |
792 scalar_mode smode; | |
793 #endif | |
794 | |
795 /* For libcalls this is invoked without TYPE from the backends | |
796 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that | |
797 case. */ | |
798 if (type == NULL_TREE) | |
799 return mode; | |
800 | |
794 /* FIXME: this is the same logic that was there until GCC 4.4, but we | 801 /* FIXME: this is the same logic that was there until GCC 4.4, but we |
795 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE | 802 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE |
796 is not defined. The affected targets are M32C, S390, SPARC. */ | 803 is not defined. The affected targets are M32C, S390, SPARC. */ |
797 #ifdef PROMOTE_MODE | 804 #ifdef PROMOTE_MODE |
798 const enum tree_code code = TREE_CODE (type); | 805 code = TREE_CODE (type); |
799 int unsignedp = *punsignedp; | 806 unsignedp = *punsignedp; |
800 | 807 |
801 switch (code) | 808 switch (code) |
802 { | 809 { |
803 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: | 810 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
804 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE: | 811 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE: |
805 PROMOTE_MODE (mode, unsignedp, type); | 812 /* Values of these types always have scalar mode. */ |
813 smode = as_a <scalar_mode> (mode); | |
814 PROMOTE_MODE (smode, unsignedp, type); | |
806 *punsignedp = unsignedp; | 815 *punsignedp = unsignedp; |
807 return mode; | 816 return smode; |
808 break; | |
809 | 817 |
810 #ifdef POINTERS_EXTEND_UNSIGNED | 818 #ifdef POINTERS_EXTEND_UNSIGNED |
811 case REFERENCE_TYPE: | 819 case REFERENCE_TYPE: |
812 case POINTER_TYPE: | 820 case POINTER_TYPE: |
813 *punsignedp = POINTERS_EXTEND_UNSIGNED; | 821 *punsignedp = POINTERS_EXTEND_UNSIGNED; |
814 return targetm.addr_space.address_mode | 822 return targetm.addr_space.address_mode |
815 (TYPE_ADDR_SPACE (TREE_TYPE (type))); | 823 (TYPE_ADDR_SPACE (TREE_TYPE (type))); |
816 break; | |
817 #endif | 824 #endif |
818 | 825 |
819 default: | 826 default: |
820 return mode; | 827 return mode; |
821 } | 828 } |
827 | 834 |
828 /* Use one of promote_mode or promote_function_mode to find the promoted | 835 /* Use one of promote_mode or promote_function_mode to find the promoted |
829 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness | 836 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness |
830 of DECL after promotion. */ | 837 of DECL after promotion. */ |
831 | 838 |
832 enum machine_mode | 839 machine_mode |
833 promote_decl_mode (const_tree decl, int *punsignedp) | 840 promote_decl_mode (const_tree decl, int *punsignedp) |
834 { | 841 { |
835 tree type = TREE_TYPE (decl); | 842 tree type = TREE_TYPE (decl); |
836 int unsignedp = TYPE_UNSIGNED (type); | 843 int unsignedp = TYPE_UNSIGNED (type); |
837 enum machine_mode mode = DECL_MODE (decl); | 844 machine_mode mode = DECL_MODE (decl); |
838 enum machine_mode pmode; | 845 machine_mode pmode; |
839 | 846 |
840 if (TREE_CODE (decl) == RESULT_DECL | 847 if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl)) |
841 || TREE_CODE (decl) == PARM_DECL) | 848 pmode = promote_function_mode (type, mode, &unsignedp, |
849 TREE_TYPE (current_function_decl), 1); | |
850 else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL) | |
842 pmode = promote_function_mode (type, mode, &unsignedp, | 851 pmode = promote_function_mode (type, mode, &unsignedp, |
843 TREE_TYPE (current_function_decl), 2); | 852 TREE_TYPE (current_function_decl), 2); |
844 else | 853 else |
845 pmode = promote_mode (type, mode, &unsignedp); | 854 pmode = promote_mode (type, mode, &unsignedp); |
846 | 855 |
847 if (punsignedp) | 856 if (punsignedp) |
848 *punsignedp = unsignedp; | 857 *punsignedp = unsignedp; |
849 return pmode; | 858 return pmode; |
850 } | 859 } |
851 | 860 |
861 /* Return the promoted mode for name. If it is a named SSA_NAME, it | |
862 is the same as promote_decl_mode. Otherwise, it is the promoted | |
863 mode of a temp decl of same type as the SSA_NAME, if we had created | |
864 one. */ | |
865 | |
866 machine_mode | |
867 promote_ssa_mode (const_tree name, int *punsignedp) | |
868 { | |
869 gcc_assert (TREE_CODE (name) == SSA_NAME); | |
870 | |
871 /* Partitions holding parms and results must be promoted as expected | |
872 by function.c. */ | |
873 if (SSA_NAME_VAR (name) | |
874 && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL | |
875 || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL)) | |
876 { | |
877 machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp); | |
878 if (mode != BLKmode) | |
879 return mode; | |
880 } | |
881 | |
882 tree type = TREE_TYPE (name); | |
883 int unsignedp = TYPE_UNSIGNED (type); | |
884 machine_mode mode = TYPE_MODE (type); | |
885 | |
886 /* Bypass TYPE_MODE when it maps vector modes to BLKmode. */ | |
887 if (mode == BLKmode) | |
888 { | |
889 gcc_assert (VECTOR_TYPE_P (type)); | |
890 mode = type->type_common.mode; | |
891 } | |
892 | |
893 machine_mode pmode = promote_mode (type, mode, &unsignedp); | |
894 if (punsignedp) | |
895 *punsignedp = unsignedp; | |
896 | |
897 return pmode; | |
898 } | |
899 | |
900 | |
852 | 901 |
902 /* Controls the behavior of {anti_,}adjust_stack. */ | |
903 static bool suppress_reg_args_size; | |
904 | |
905 /* A helper for adjust_stack and anti_adjust_stack. */ | |
906 | |
907 static void | |
908 adjust_stack_1 (rtx adjust, bool anti_p) | |
909 { | |
910 rtx temp; | |
911 rtx_insn *insn; | |
912 | |
913 /* Hereafter anti_p means subtract_p. */ | |
914 if (!STACK_GROWS_DOWNWARD) | |
915 anti_p = !anti_p; | |
916 | |
917 temp = expand_binop (Pmode, | |
918 anti_p ? sub_optab : add_optab, | |
919 stack_pointer_rtx, adjust, stack_pointer_rtx, 0, | |
920 OPTAB_LIB_WIDEN); | |
921 | |
922 if (temp != stack_pointer_rtx) | |
923 insn = emit_move_insn (stack_pointer_rtx, temp); | |
924 else | |
925 { | |
926 insn = get_last_insn (); | |
927 temp = single_set (insn); | |
928 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx); | |
929 } | |
930 | |
931 if (!suppress_reg_args_size) | |
932 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta)); | |
933 } | |
934 | |
853 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes). | 935 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes). |
854 This pops when ADJUST is positive. ADJUST need not be constant. */ | 936 This pops when ADJUST is positive. ADJUST need not be constant. */ |
855 | 937 |
856 void | 938 void |
857 adjust_stack (rtx adjust) | 939 adjust_stack (rtx adjust) |
858 { | 940 { |
859 rtx temp; | |
860 | |
861 if (adjust == const0_rtx) | 941 if (adjust == const0_rtx) |
862 return; | 942 return; |
863 | 943 |
864 /* We expect all variable sized adjustments to be multiple of | 944 /* We expect all variable sized adjustments to be multiple of |
865 PREFERRED_STACK_BOUNDARY. */ | 945 PREFERRED_STACK_BOUNDARY. */ |
866 if (CONST_INT_P (adjust)) | 946 if (CONST_INT_P (adjust)) |
867 stack_pointer_delta -= INTVAL (adjust); | 947 stack_pointer_delta -= INTVAL (adjust); |
868 | 948 |
869 temp = expand_binop (Pmode, | 949 adjust_stack_1 (adjust, false); |
870 #ifdef STACK_GROWS_DOWNWARD | |
871 add_optab, | |
872 #else | |
873 sub_optab, | |
874 #endif | |
875 stack_pointer_rtx, adjust, stack_pointer_rtx, 0, | |
876 OPTAB_LIB_WIDEN); | |
877 | |
878 if (temp != stack_pointer_rtx) | |
879 emit_move_insn (stack_pointer_rtx, temp); | |
880 } | 950 } |
881 | 951 |
882 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes). | 952 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes). |
883 This pushes when ADJUST is positive. ADJUST need not be constant. */ | 953 This pushes when ADJUST is positive. ADJUST need not be constant. */ |
884 | 954 |
885 void | 955 void |
886 anti_adjust_stack (rtx adjust) | 956 anti_adjust_stack (rtx adjust) |
887 { | 957 { |
888 rtx temp; | |
889 | |
890 if (adjust == const0_rtx) | 958 if (adjust == const0_rtx) |
891 return; | 959 return; |
892 | 960 |
893 /* We expect all variable sized adjustments to be multiple of | 961 /* We expect all variable sized adjustments to be multiple of |
894 PREFERRED_STACK_BOUNDARY. */ | 962 PREFERRED_STACK_BOUNDARY. */ |
895 if (CONST_INT_P (adjust)) | 963 if (CONST_INT_P (adjust)) |
896 stack_pointer_delta += INTVAL (adjust); | 964 stack_pointer_delta += INTVAL (adjust); |
897 | 965 |
898 temp = expand_binop (Pmode, | 966 adjust_stack_1 (adjust, true); |
899 #ifdef STACK_GROWS_DOWNWARD | |
900 sub_optab, | |
901 #else | |
902 add_optab, | |
903 #endif | |
904 stack_pointer_rtx, adjust, stack_pointer_rtx, 0, | |
905 OPTAB_LIB_WIDEN); | |
906 | |
907 if (temp != stack_pointer_rtx) | |
908 emit_move_insn (stack_pointer_rtx, temp); | |
909 } | 967 } |
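A minimal usage sketch of the pair above, allocating and then releasing 32 bytes of stack (GEN_INT builds the CONST_INT operand):

    anti_adjust_stack (GEN_INT (32));  /* push: allocate 32 bytes */
    /* ... use the space ... */
    adjust_stack (GEN_INT (32));       /* pop: release the same 32 bytes */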
910 | 968 |
911 /* Round the size of a block to be pushed up to the boundary required | 969 /* Round the size of a block to be pushed up to the boundary required |
912 by this machine. SIZE is the desired size, which need not be constant. */ | 970 by this machine. SIZE is the desired size, which need not be constant. */ |
913 | 971 |
941 /* If crtl->preferred_stack_boundary might still grow, use | 999 /* If crtl->preferred_stack_boundary might still grow, use |
942 virtual_preferred_stack_boundary_rtx instead. This will be | 1000 virtual_preferred_stack_boundary_rtx instead. This will be |
943 substituted by the right value in vregs pass and optimized | 1001 substituted by the right value in vregs pass and optimized |
944 during combine. */ | 1002 during combine. */ |
945 align_rtx = virtual_preferred_stack_boundary_rtx; | 1003 align_rtx = virtual_preferred_stack_boundary_rtx; |
946 alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX); | 1004 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1), |
1005 NULL_RTX); | |
947 } | 1006 } |
948 | 1007 |
949 /* CEIL_DIV_EXPR needs to worry about the addition overflowing, | 1008 /* CEIL_DIV_EXPR needs to worry about the addition overflowing, |
950 but we know it can't. So add ourselves and then do | 1009 but we know it can't. So add ourselves and then do |
951 TRUNC_DIV_EXPR. */ | 1010 TRUNC_DIV_EXPR. */ |
966 void | 1025 void |
967 emit_stack_save (enum save_level save_level, rtx *psave) | 1026 emit_stack_save (enum save_level save_level, rtx *psave) |
968 { | 1027 { |
969 rtx sa = *psave; | 1028 rtx sa = *psave; |
970 /* The default is that we use a move insn and save in a Pmode object. */ | 1029 /* The default is that we use a move insn and save in a Pmode object. */ |
971 rtx (*fcn) (rtx, rtx) = gen_move_insn; | 1030 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn; |
972 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level); | 1031 machine_mode mode = STACK_SAVEAREA_MODE (save_level); |
973 | 1032 |
974 /* See if this machine has anything special to do for this kind of save. */ | 1033 /* See if this machine has anything special to do for this kind of save. */ |
975 switch (save_level) | 1034 switch (save_level) |
976 { | 1035 { |
977 #ifdef HAVE_save_stack_block | |
978 case SAVE_BLOCK: | 1036 case SAVE_BLOCK: |
979 if (HAVE_save_stack_block) | 1037 if (targetm.have_save_stack_block ()) |
980 fcn = gen_save_stack_block; | 1038 fcn = targetm.gen_save_stack_block; |
981 break; | 1039 break; |
982 #endif | |
983 #ifdef HAVE_save_stack_function | |
984 case SAVE_FUNCTION: | 1040 case SAVE_FUNCTION: |
985 if (HAVE_save_stack_function) | 1041 if (targetm.have_save_stack_function ()) |
986 fcn = gen_save_stack_function; | 1042 fcn = targetm.gen_save_stack_function; |
987 break; | 1043 break; |
988 #endif | |
989 #ifdef HAVE_save_stack_nonlocal | |
990 case SAVE_NONLOCAL: | 1044 case SAVE_NONLOCAL: |
991 if (HAVE_save_stack_nonlocal) | 1045 if (targetm.have_save_stack_nonlocal ()) |
992 fcn = gen_save_stack_nonlocal; | 1046 fcn = targetm.gen_save_stack_nonlocal; |
993 break; | 1047 break; |
994 #endif | |
995 default: | 1048 default: |
996 break; | 1049 break; |
997 } | 1050 } |
998 | 1051 |
999 /* If there is no save area and we have to allocate one, do so. Otherwise | 1052 /* If there is no save area and we have to allocate one, do so. Otherwise |
1021 | 1074 |
1022 void | 1075 void |
1023 emit_stack_restore (enum save_level save_level, rtx sa) | 1076 emit_stack_restore (enum save_level save_level, rtx sa) |
1024 { | 1077 { |
1025 /* The default is that we use a move insn. */ | 1078 /* The default is that we use a move insn. */ |
1026 rtx (*fcn) (rtx, rtx) = gen_move_insn; | 1079 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn; |
1080 | |
1081 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both | |
1082 STACK_POINTER and HARD_FRAME_POINTER. | |
1083 If stack_realign_fp, the x86 backend emits a prologue that aligns only | |
1084 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing | |
1085 aligned variables, which is reflected in ix86_can_eliminate. | |
1086 We normally still have the realigned STACK_POINTER that we can use. | |
1087 But if there is a stack restore still present at reload, it can trigger | |
1088 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate | |
1089 FRAME_POINTER into a hard reg. | |
1090 To prevent this situation, we force need_drap if we emit a stack | |
1091 restore. */ | |
1092 if (SUPPORTS_STACK_ALIGNMENT) | |
1093 crtl->need_drap = true; | |
1027 | 1094 |
1028 /* See if this machine has anything special to do for this kind of save. */ | 1095 /* See if this machine has anything special to do for this kind of save. */ |
1029 switch (save_level) | 1096 switch (save_level) |
1030 { | 1097 { |
1031 #ifdef HAVE_restore_stack_block | |
1032 case SAVE_BLOCK: | 1098 case SAVE_BLOCK: |
1033 if (HAVE_restore_stack_block) | 1099 if (targetm.have_restore_stack_block ()) |
1034 fcn = gen_restore_stack_block; | 1100 fcn = targetm.gen_restore_stack_block; |
1035 break; | 1101 break; |
1036 #endif | |
1037 #ifdef HAVE_restore_stack_function | |
1038 case SAVE_FUNCTION: | 1102 case SAVE_FUNCTION: |
1039 if (HAVE_restore_stack_function) | 1103 if (targetm.have_restore_stack_function ()) |
1040 fcn = gen_restore_stack_function; | 1104 fcn = targetm.gen_restore_stack_function; |
1041 break; | 1105 break; |
1042 #endif | |
1043 #ifdef HAVE_restore_stack_nonlocal | |
1044 case SAVE_NONLOCAL: | 1106 case SAVE_NONLOCAL: |
1045 if (HAVE_restore_stack_nonlocal) | 1107 if (targetm.have_restore_stack_nonlocal ()) |
1046 fcn = gen_restore_stack_nonlocal; | 1108 fcn = targetm.gen_restore_stack_nonlocal; |
1047 break; | 1109 break; |
1048 #endif | |
1049 default: | 1110 default: |
1050 break; | 1111 break; |
1051 } | 1112 } |
1052 | 1113 |
1053 if (sa != 0) | 1114 if (sa != 0) |
1064 | 1125 |
1065 emit_insn (fcn (stack_pointer_rtx, sa)); | 1126 emit_insn (fcn (stack_pointer_rtx, sa)); |
1066 } | 1127 } |
1067 | 1128 |
1068 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current | 1129 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current |
1069 function. This function should be called whenever we allocate or | 1130 function. This should be called whenever we allocate or deallocate |
1070 deallocate dynamic stack space. */ | 1131 dynamic stack space. */ |
1071 | 1132 |
1072 void | 1133 void |
1073 update_nonlocal_goto_save_area (void) | 1134 update_nonlocal_goto_save_area (void) |
1074 { | 1135 { |
1075 tree t_save; | 1136 tree t_save; |
1077 | 1138 |
1078 /* The nonlocal_goto_save_area object is an array of N pointers. The | 1139 /* The nonlocal_goto_save_area object is an array of N pointers. The |
1079 first one is used for the frame pointer save; the rest are sized by | 1140 first one is used for the frame pointer save; the rest are sized by |
1080 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first | 1141 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first |
1081 of the stack save area slots. */ | 1142 of the stack save area slots. */ |
1082 t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area, | 1143 t_save = build4 (ARRAY_REF, |
1144 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)), | |
1145 cfun->nonlocal_goto_save_area, | |
1083 integer_one_node, NULL_TREE, NULL_TREE); | 1146 integer_one_node, NULL_TREE, NULL_TREE); |
1084 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); | 1147 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); |
1085 | 1148 |
1086 emit_stack_save (SAVE_NONLOCAL, &r_save); | 1149 emit_stack_save (SAVE_NONLOCAL, &r_save); |
1087 } | 1150 } |
1151 | |
1152 /* Record a new stack level for the current function. This should be called | |
1153 whenever we allocate or deallocate dynamic stack space. */ | |
1154 | |
1155 void | |
1156 record_new_stack_level (void) | |
1157 { | |
1158 /* Record the new stack level for nonlocal gotos. */ | |
1159 if (cfun->nonlocal_goto_save_area) | |
1160 update_nonlocal_goto_save_area (); | |
1161 | |
1162 /* Record the new stack level for SJLJ exceptions. */ | |
1163 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ) | |
1164 update_sjlj_context (); | |
1165 } | |
1088 | 1166 |
1167 /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */ | |
1168 static rtx | |
1169 align_dynamic_address (rtx target, unsigned required_align) | |
1170 { | |
1171 /* CEIL_DIV_EXPR needs to worry about the addition overflowing, | |
1172 but we know it can't. So add ourselves and then do | |
1173 TRUNC_DIV_EXPR. */ | |
1174 target = expand_binop (Pmode, add_optab, target, | |
1175 gen_int_mode (required_align / BITS_PER_UNIT - 1, | |
1176 Pmode), | |
1177 NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1178 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target, | |
1179 gen_int_mode (required_align / BITS_PER_UNIT, | |
1180 Pmode), | |
1181 NULL_RTX, 1); | |
1182 target = expand_mult (Pmode, target, | |
1183 gen_int_mode (required_align / BITS_PER_UNIT, | |
1184 Pmode), | |
1185 NULL_RTX, 1); | |
1186 | |
1187 return target; | |
1188 } | |
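The add/divide/multiply sequence above is the usual round-up-to-a-multiple idiom, spelled out as RTL because the addition is known not to overflow. The same arithmetic as a scalar sketch:

    /* Round ADDR up to a multiple of ALIGN bytes: with ALIGN == 16 and
       ADDR == 23, (23 + 15) / 16 * 16 == 32.  */
    static unsigned HOST_WIDE_INT
    round_up_to (unsigned HOST_WIDE_INT addr, unsigned HOST_WIDE_INT align)
    {
      return (addr + align - 1) / align * align;
    }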
1189 | |
1190 /* Return an rtx through *PSIZE, representing the size of an area of memory to | |
1191 be dynamically pushed on the stack. | |
1192 | |
1193 *PSIZE is an rtx representing the size of the area. | |
1194 | |
1195 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This | |
1196 parameter may be zero. If so, a proper value will be extracted | |
1197 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed. | |
1198 | |
1199 REQUIRED_ALIGN is the alignment (in bits) required for the region | |
1200 of memory. | |
1201 | |
1202 If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for | |
1203 the additional size returned. */ | |
1204 void | |
1205 get_dynamic_stack_size (rtx *psize, unsigned size_align, | |
1206 unsigned required_align, | |
1207 HOST_WIDE_INT *pstack_usage_size) | |
1208 { | |
1209 unsigned extra = 0; | |
1210 rtx size = *psize; | |
1211 | |
1212 /* Ensure the size is in the proper mode. */ | |
1213 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1214 size = convert_to_mode (Pmode, size, 1); | |
1215 | |
1216 if (CONST_INT_P (size)) | |
1217 { | |
1218 unsigned HOST_WIDE_INT lsb; | |
1219 | |
1220 lsb = INTVAL (size); | |
1221 lsb &= -lsb; | |
1222 | |
1223 /* Watch out for overflow truncating to "unsigned". */ | |
1224 if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1225 size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1226 else | |
1227 size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1228 } | |
1229 else if (size_align < BITS_PER_UNIT) | |
1230 size_align = BITS_PER_UNIT; | |
1231 | |
1232 /* We can't attempt to minimize alignment necessary, because we don't | |
1233 know the final value of preferred_stack_boundary yet while executing | |
1234 this code. */ | |
1235 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1236 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1237 | |
1238 /* We will need to ensure that the address we return is aligned to | |
1239 REQUIRED_ALIGN. At this point in the compilation, we don't always | |
1240 know the final value of the STACK_DYNAMIC_OFFSET used in function.c | |
1241 (it might depend on the size of the outgoing parameter lists, for | |
1242 example), so we must preventively align the value. We leave space | |
1243 in SIZE for the hole that might result from the alignment operation. */ | |
1244 | |
1245 /* Since the stack is presumed to be aligned before this allocation, | |
1246 we only need to increase the size of the allocation if the required | |
1247 alignment is more than the stack alignment. */ | |
1248 if (required_align > STACK_BOUNDARY) | |
1249 { | |
1250 extra = (required_align - STACK_BOUNDARY) / BITS_PER_UNIT; | |
1251 size = plus_constant (Pmode, size, extra); | |
1252 size = force_operand (size, NULL_RTX); | |
1253 if (size_align > STACK_BOUNDARY) | |
1254 size_align = STACK_BOUNDARY; | |
1255 | |
1256 if (flag_stack_usage_info && pstack_usage_size) | |
1257 *pstack_usage_size += extra; | |
1258 } | |
1259 | |
1260 /* Round the size to a multiple of the required stack alignment. | |
1261 Since the stack is presumed to be rounded before this allocation, | |
1262 this will maintain the required alignment. | |
1263 | |
1264 If the stack grows downward, we could save an insn by subtracting | |
1265 SIZE from the stack pointer and then aligning the stack pointer. | |
1266 The problem with this is that the stack pointer may be unaligned | |
1267 between the execution of the subtraction and alignment insns and | |
1268 some machines do not allow this. Even on those that do, some | |
1269 signal handlers malfunction if a signal should occur between those | |
1270 insns. Since this is an extremely rare event, we have no reliable | |
1271 way of knowing which systems have this problem. So we avoid even | |
1272 momentarily mis-aligning the stack. */ | |
1273 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) | |
1274 { | |
1275 size = round_push (size); | |
1276 | |
1277 if (flag_stack_usage_info && pstack_usage_size) | |
1278 { | |
1279 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; | |
1280 *pstack_usage_size = | |
1281 (*pstack_usage_size + align - 1) / align * align; | |
1282 } | |
1283 } | |
1284 | |
1285 *psize = size; | |
1286 } | |
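The lsb computation near the top of this function derives the provable alignment of a constant size from its lowest set bit; a worked sketch of that step:

    /* SIZE == 24 (binary 11000): 24 & -24 == 8, so SIZE is a multiple of
       8 bytes and size_align becomes 8 * BITS_PER_UNIT.  */
    unsigned HOST_WIDE_INT size = 24;
    unsigned HOST_WIDE_INT lsb = size & -size;  /* == 8 */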
1287 | |
1288 /* Return the number of bytes to "protect" on the stack for -fstack-check. | |
1289 | |
1290 "protect" in the context of -fstack-check means how many bytes we | |
1291 should always ensure are available on the stack. More importantly | |
1292 this is how many bytes are skipped when probing the stack. | |
1293 | |
1294 On some targets we want to reuse the -fstack-check prologue support | |
1295 to give a degree of protection against stack clashing style attacks. | |
1296 | |
1297 In that scenario we do not want to skip bytes before probing as that | |
1298 would render the stack clash protections useless. | |
1299 | |
1300 So we never use STACK_CHECK_PROTECT directly. Instead we indirect though | |
1301 this helper which allows us to provide different values for | |
1302 -fstack-check and -fstack-clash-protection. */ | |
1303 HOST_WIDE_INT | |
1304 get_stack_check_protect (void) | |
1305 { | |
1306 if (flag_stack_clash_protection) | |
1307 return 0; | |
1308 return STACK_CHECK_PROTECT; | |
1309 } | |
1310 | |
1089 /* Return an rtx representing the address of an area of memory dynamically | 1311 /* Return an rtx representing the address of an area of memory dynamically |
1090 pushed on the stack. | 1312 pushed on the stack. |
1091 | 1313 |
1092 Any required stack pointer alignment is preserved. | 1314 Any required stack pointer alignment is preserved. |
1093 | 1315 |
1094 SIZE is an rtx representing the size of the area. | 1316 SIZE is an rtx representing the size of the area. |
1095 | 1317 |
1096 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This | 1318 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This |
1097 parameter may be zero. If so, a proper value will be extracted | 1319 parameter may be zero. If so, a proper value will be extracted |
1098 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed. | 1320 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed. |
1099 | 1321 |
1100 REQUIRED_ALIGN is the alignment (in bits) required for the region | 1322 REQUIRED_ALIGN is the alignment (in bits) required for the region |
1101 of memory. | 1323 of memory. |
1324 | |
1325 MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if | |
1326 no such upper bound is known. | |
1102 | 1327 |
1103 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the | 1328 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the |
1104 stack space allocated by the generated code cannot be added with itself | 1329 stack space allocated by the generated code cannot be added with itself |
1105 in the course of the execution of the function. It is always safe to | 1330 in the course of the execution of the function. It is always safe to |
1106 pass FALSE here and the following criterion is sufficient in order to | 1331 pass FALSE here and the following criterion is sufficient in order to |
1107 pass TRUE: every path in the CFG that starts at the allocation point and | 1332 pass TRUE: every path in the CFG that starts at the allocation point and |
1108 loops to it executes the associated deallocation code. */ | 1333 loops to it executes the associated deallocation code. */ |
1109 | 1334 |
1110 rtx | 1335 rtx |
1111 allocate_dynamic_stack_space (rtx size, unsigned size_align, | 1336 allocate_dynamic_stack_space (rtx size, unsigned size_align, |
1112 unsigned required_align, bool cannot_accumulate) | 1337 unsigned required_align, |
1338 HOST_WIDE_INT max_size, | |
1339 bool cannot_accumulate) | |
1113 { | 1340 { |
1114 HOST_WIDE_INT stack_usage_size = -1; | 1341 HOST_WIDE_INT stack_usage_size = -1; |
1115 rtx final_label, final_target, target; | 1342 rtx_code_label *final_label; |
1116 unsigned extra_align = 0; | 1343 rtx final_target, target; |
1117 bool must_align; | |
1118 | 1344 |
1119 /* If we're asking for zero bytes, it doesn't matter what we point | 1345 /* If we're asking for zero bytes, it doesn't matter what we point |
1120 to since we can't dereference it. But return a reasonable | 1346 to since we can't dereference it. But return a reasonable |
1121 address anyway. */ | 1347 address anyway. */ |
1122 if (size == const0_rtx) | 1348 if (size == const0_rtx) |
1126 cfun->calls_alloca = 1; | 1352 cfun->calls_alloca = 1; |
1127 | 1353 |
1128 /* If stack usage info is requested, look into the size we are passed. | 1354 /* If stack usage info is requested, look into the size we are passed. |
1129 We need to do so this early to avoid the obfuscation that may be | 1355 We need to do so this early to avoid the obfuscation that may be |
1130 introduced later by the various alignment operations. */ | 1356 introduced later by the various alignment operations. */ |
1131 if (flag_stack_usage) | 1357 if (flag_stack_usage_info) |
1132 { | 1358 { |
1133 if (CONST_INT_P (size)) | 1359 if (CONST_INT_P (size)) |
1134 stack_usage_size = INTVAL (size); | 1360 stack_usage_size = INTVAL (size); |
1135 else if (REG_P (size)) | 1361 else if (REG_P (size)) |
1136 { | 1362 { |
1137 /* Look into the last emitted insn and see if we can deduce | 1363 /* Look into the last emitted insn and see if we can deduce |
1138 something for the register. */ | 1364 something for the register. */ |
1139 rtx insn, set, note; | 1365 rtx_insn *insn; |
1366 rtx set, note; | |
1140 insn = get_last_insn (); | 1367 insn = get_last_insn (); |
1141 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size)) | 1368 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size)) |
1142 { | 1369 { |
1143 if (CONST_INT_P (SET_SRC (set))) | 1370 if (CONST_INT_P (SET_SRC (set))) |
1144 stack_usage_size = INTVAL (SET_SRC (set)); | 1371 stack_usage_size = INTVAL (SET_SRC (set)); |
1146 && CONST_INT_P (XEXP (note, 0))) | 1373 && CONST_INT_P (XEXP (note, 0))) |
1147 stack_usage_size = INTVAL (XEXP (note, 0)); | 1374 stack_usage_size = INTVAL (XEXP (note, 0)); |
1148 } | 1375 } |
1149 } | 1376 } |
1150 | 1377 |
1151 /* If the size is not constant, we can't say anything. */ | 1378 /* If the size is not constant, try the maximum size. */ |
1152 if (stack_usage_size == -1) | 1379 if (stack_usage_size < 0) |
1380 stack_usage_size = max_size; | |
1381 | |
1382 /* If the size is still not constant, we can't say anything. */ | |
1383 if (stack_usage_size < 0) | |
1153 { | 1384 { |
1154 current_function_has_unbounded_dynamic_stack_size = 1; | 1385 current_function_has_unbounded_dynamic_stack_size = 1; |
1155 stack_usage_size = 0; | 1386 stack_usage_size = 0; |
1156 } | 1387 } |
1157 } | 1388 } |
1158 | 1389 |
1159 /* Ensure the size is in the proper mode. */ | 1390 get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size); |
1160 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1161 size = convert_to_mode (Pmode, size, 1); | |
1162 | |
1163 /* Adjust SIZE_ALIGN, if needed. */ | |
1164 if (CONST_INT_P (size)) | |
1165 { | |
1166 unsigned HOST_WIDE_INT lsb; | |
1167 | |
1168 lsb = INTVAL (size); | |
1169 lsb &= -lsb; | |
1170 | |
1171 /* Watch out for overflow truncating to "unsigned". */ | |
1172 if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1173 size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1174 else | |
1175 size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1176 } | |
1177 else if (size_align < BITS_PER_UNIT) | |
1178 size_align = BITS_PER_UNIT; | |
1179 | |
1180 /* We can't attempt to minimize alignment necessary, because we don't | |
1181 know the final value of preferred_stack_boundary yet while executing | |
1182 this code. */ | |
1183 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1184 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1185 | |
1186 /* We will need to ensure that the address we return is aligned to | |
1187 REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't | |
1188 always know its final value at this point in the compilation (it | |
1189 might depend on the size of the outgoing parameter lists, for | |
1190 example), so we must align the value to be returned in that case. | |
1191 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if | |
1192 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined). | |
1193 We must also do an alignment operation on the returned value if | |
1194 the stack pointer alignment is less strict than REQUIRED_ALIGN. | |
1195 | |
1196 If we have to align, we must leave space in SIZE for the hole | |
1197 that might result from the alignment operation. */ | |
1198 | |
1199 must_align = (crtl->preferred_stack_boundary < required_align); | |
1200 if (must_align) | |
1201 { | |
1202 if (required_align > PREFERRED_STACK_BOUNDARY) | |
1203 extra_align = PREFERRED_STACK_BOUNDARY; | |
1204 else if (required_align > STACK_BOUNDARY) | |
1205 extra_align = STACK_BOUNDARY; | |
1206 else | |
1207 extra_align = BITS_PER_UNIT; | |
1208 } | |
1209 | |
1210 /* ??? STACK_POINTER_OFFSET is always defined now. */ | |
1211 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) | |
1212 must_align = true; | |
1213 extra_align = BITS_PER_UNIT; | |
1214 #endif | |
1215 | |
1216 if (must_align) | |
1217 { | |
1218 unsigned extra = (required_align - extra_align) / BITS_PER_UNIT; | |
1219 | |
1220 size = plus_constant (size, extra); | |
1221 size = force_operand (size, NULL_RTX); | |
1222 | |
1223 if (flag_stack_usage) | |
1224 stack_usage_size += extra; | |
1225 | |
1226 if (extra && size_align > extra_align) | |
1227 size_align = extra_align; | |
1228 } | |
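The EXTRA bytes added above are the worst-case "hole" lost when the returned address is later rounded up: if the address is only known to be EXTRA_ALIGN-aligned, rounding it up to REQUIRED_ALIGN can skip at most (REQUIRED_ALIGN - EXTRA_ALIGN) / BITS_PER_UNIT bytes. A small self-check of that bound, assuming byte-aligned input and a 16-byte requirement (toy values, not GCC code):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Round ADDR up to ALIGN, a power of two.  */
static uintptr_t
align_up (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & ~(align - 1);
}

int
main (void)
{
  const uintptr_t required_align = 16;          /* bytes */
  const uintptr_t size = 40;
  const uintptr_t extra = required_align - 1;   /* extra_align == 1 byte */

  /* For every possible 1-byte-aligned base, SIZE bytes starting at the
     rounded address still fit inside the SIZE + EXTRA byte allocation.  */
  for (uintptr_t base = 4096; base < 4096 + required_align; base++)
    assert (align_up (base, required_align) + size <= base + size + extra);
  puts ("bound holds");
  return 0;
}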
1229 | |
1230 #ifdef SETJMP_VIA_SAVE_AREA | |
1231 /* If setjmp restores regs from a save area in the stack frame, | |
1232 avoid clobbering the reg save area. Note that the offset of | |
1233 virtual_incoming_args_rtx includes the preallocated stack args space. | |
1234 It would be no problem to clobber that, but it's on the wrong side | |
1235 of the old save area. | |
1236 | |
1237 What used to happen is that, since we did not know for sure | |
1238 whether setjmp() was invoked until after RTL generation, we | |
1239 would use reg notes to store the "optimized" size and fix things | |
1240 up later. These days we know this information before we ever | |
1241 start building RTL so the reg notes are unnecessary. */ | |
1242 if (cfun->calls_setjmp) | |
1243 { | |
1244 rtx dynamic_offset | |
1245 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx, | |
1246 stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1247 | |
1248 size = expand_binop (Pmode, add_optab, size, dynamic_offset, | |
1249 NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1250 | |
1251 /* The above dynamic offset cannot be computed statically at this | |
1252 point, but it will be possible to do so after RTL expansion is | |
1253 done. Record how many times we will need to add it. */ | |
1254 if (flag_stack_usage) | |
1255 current_function_dynamic_alloc_count++; | |
1256 | |
1257 /* ??? Can we infer a minimum of STACK_BOUNDARY here? */ | |
1258 size_align = BITS_PER_UNIT; | |
1259 } | |
1260 #endif /* SETJMP_VIA_SAVE_AREA */ | |
1261 | |
1262 /* Round the size to a multiple of the required stack alignment. | |
1263 Since the stack is presumed to be rounded before this allocation, |
1264 this will maintain the required alignment. | |
1265 | |
1266 If the stack grows downward, we could save an insn by subtracting | |
1267 SIZE from the stack pointer and then aligning the stack pointer. | |
1268 The problem with this is that the stack pointer may be unaligned | |
1269 between the execution of the subtraction and alignment insns and | |
1270 some machines do not allow this. Even on those that do, some | |
1271 signal handlers malfunction if a signal should occur between those | |
1272 insns. Since this is an extremely rare event, we have no reliable | |
1273 way of knowing which systems have this problem. So we avoid even | |
1274 momentarily mis-aligning the stack. */ | |
1275 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) | |
1276 { | |
1277 size = round_push (size); | |
1278 | |
1279 if (flag_stack_usage) | |
1280 { | |
1281 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; | |
1282 stack_usage_size = (stack_usage_size + align - 1) / align * align; | |
1283 } | |
1284 } | |
1285 | 1391 |
1286 target = gen_reg_rtx (Pmode); | 1392 target = gen_reg_rtx (Pmode); |
1287 | 1393 |
1288 /* The size is supposed to be fully adjusted at this point so record it | 1394 /* The size is supposed to be fully adjusted at this point so record it |
1289 if stack usage info is requested. */ | 1395 if stack usage info is requested. */ |
1290 if (flag_stack_usage) | 1396 if (flag_stack_usage_info) |
1291 { | 1397 { |
1292 current_function_dynamic_stack_size += stack_usage_size; | 1398 current_function_dynamic_stack_size += stack_usage_size; |
1293 | 1399 |
1294 /* ??? This is gross but the only safe stance in the absence | 1400 /* ??? This is gross but the only safe stance in the absence |
1295 of stack usage oriented flow analysis. */ | 1401 of stack usage oriented flow analysis. */ |
1296 if (!cannot_accumulate) | 1402 if (!cannot_accumulate) |
1297 current_function_has_unbounded_dynamic_stack_size = 1; | 1403 current_function_has_unbounded_dynamic_stack_size = 1; |
1298 } | 1404 } |
1299 | 1405 |
1300 final_label = NULL_RTX; | 1406 do_pending_stack_adjust (); |
1407 | |
1408 final_label = NULL; | |
1301 final_target = NULL_RTX; | 1409 final_target = NULL_RTX; |
1302 | 1410 |
1303 /* If we are splitting the stack, we need to ask the backend whether | 1411 /* If we are splitting the stack, we need to ask the backend whether |
1304 there is enough room on the current stack. If there isn't, or if | 1412 there is enough room on the current stack. If there isn't, or if |
1305 the backend doesn't know how to tell us, then we need to call a | 1413 the backend doesn't know how to tell us, then we need to call a
1307 be released when we release the current stack segment. The | 1415 be released when we release the current stack segment. The |
1308 effect is that stack allocation becomes less efficient, but at | 1416 effect is that stack allocation becomes less efficient, but at |
1309 least it doesn't cause a stack overflow. */ | 1417 least it doesn't cause a stack overflow. */ |
1310 if (flag_split_stack) | 1418 if (flag_split_stack) |
1311 { | 1419 { |
1312 rtx available_label, ask, space, func; | 1420 rtx_code_label *available_label; |
1313 | 1421 rtx ask, space, func; |
1314 available_label = NULL_RTX; | 1422 |
1315 | 1423 available_label = NULL; |
1316 #ifdef HAVE_split_stack_space_check | 1424 |
1317 if (HAVE_split_stack_space_check) | 1425 if (targetm.have_split_stack_space_check ()) |
1318 { | 1426 { |
1319 available_label = gen_label_rtx (); | 1427 available_label = gen_label_rtx (); |
1320 | 1428 |
1321 /* This instruction will branch to AVAILABLE_LABEL if there | 1429 /* This instruction will branch to AVAILABLE_LABEL if there |
1322 are SIZE bytes available on the stack. */ | 1430 are SIZE bytes available on the stack. */ |
1323 emit_insn (gen_split_stack_space_check (size, available_label)); | 1431 emit_insn (targetm.gen_split_stack_space_check |
1432 (size, available_label)); | |
1324 } | 1433 } |
1325 #endif | |
1326 | 1434 |
1327 /* The __morestack_allocate_stack_space function will allocate | 1435 /* The __morestack_allocate_stack_space function will allocate |
1328 memory using malloc. If the alignment of the memory returned | 1436 memory using malloc. If the alignment of the memory returned |
1329 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to | 1437 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to |
1330 make sure we allocate enough space. */ | 1438 make sure we allocate enough space. */ |
1331 if (MALLOC_ABI_ALIGNMENT >= required_align) | 1439 if (MALLOC_ABI_ALIGNMENT >= required_align) |
1332 ask = size; | 1440 ask = size; |
1333 else | 1441 else |
1334 { | 1442 ask = expand_binop (Pmode, add_optab, size, |
1335 ask = expand_binop (Pmode, add_optab, size, | 1443 gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1336 GEN_INT (required_align / BITS_PER_UNIT - 1), | 1444 Pmode), |
1337 NULL_RTX, 1, OPTAB_LIB_WIDEN); | 1445 NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1338 must_align = true; | |
1339 } | |
1340 | 1446 |
1341 func = init_one_libfunc ("__morestack_allocate_stack_space"); | 1447 func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1342 | 1448 |
1343 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | 1449 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, |
1344 1, ask, Pmode); | 1450 ask, Pmode); |
1345 | 1451 |
1346 if (available_label == NULL_RTX) | 1452 if (available_label == NULL_RTX) |
1347 return space; | 1453 return space; |
1348 | 1454 |
1349 final_target = gen_reg_rtx (Pmode); | 1455 final_target = gen_reg_rtx (Pmode); |
1353 final_label = gen_label_rtx (); | 1459 final_label = gen_label_rtx (); |
1354 emit_jump (final_label); | 1460 emit_jump (final_label); |
1355 | 1461 |
1356 emit_label (available_label); | 1462 emit_label (available_label); |
1357 } | 1463 } |
1358 | |
1359 do_pending_stack_adjust (); | |
1360 | 1464 |
1361 /* We ought always to be called at the toplevel and the stack ought to be aligned | 1465 /* We ought always to be called at the toplevel and the stack ought to be aligned
1362 properly. */ | 1466 properly. */ |
1363 gcc_assert (!(stack_pointer_delta | 1467 gcc_assert (!(stack_pointer_delta |
1364 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | 1468 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); |
1369 ; | 1473 ; |
1370 else if (flag_stack_check == GENERIC_STACK_CHECK) | 1474 else if (flag_stack_check == GENERIC_STACK_CHECK) |
1371 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, | 1475 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, |
1372 size); | 1476 size); |
1373 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) | 1477 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) |
1374 probe_stack_range (STACK_CHECK_PROTECT, size); | 1478 probe_stack_range (get_stack_check_protect (), size); |
1479 | |
1480 /* Don't let anti_adjust_stack emit notes. */ | |
1481 suppress_reg_args_size = true; | |
1375 | 1482 |
1376 /* Perform the required allocation from the stack. Some systems do | 1483 /* Perform the required allocation from the stack. Some systems do |
1377 this differently than simply incrementing/decrementing from the | 1484 this differently than simply incrementing/decrementing from the |
1378 stack pointer, such as acquiring the space by calling malloc(). */ | 1485 stack pointer, such as acquiring the space by calling malloc(). */ |
1379 #ifdef HAVE_allocate_stack | 1486 if (targetm.have_allocate_stack ()) |
1380 if (HAVE_allocate_stack) | 1487 { |
1381 { | 1488 struct expand_operand ops[2]; |
1382 enum machine_mode mode = STACK_SIZE_MODE; | |
1383 insn_operand_predicate_fn pred; | |
1384 | |
1385 /* We don't have to check against the predicate for operand 0 since | 1489 /* We don't have to check against the predicate for operand 0 since |
1386 TARGET is known to be a pseudo of the proper mode, which must | 1490 TARGET is known to be a pseudo of the proper mode, which must |
1387 be valid for the operand. For operand 1, convert to the | 1491 be valid for the operand. */ |
1388 proper mode and validate. */ | 1492 create_fixed_operand (&ops[0], target); |
1389 if (mode == VOIDmode) | 1493 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true); |
1390 mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode; | 1494 expand_insn (targetm.code_for_allocate_stack, 2, ops); |
1391 | |
1392 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate; | |
1393 if (pred && ! ((*pred) (size, mode))) | |
1394 size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1)); | |
1395 | |
1396 emit_insn (gen_allocate_stack (target, size)); | |
1397 } | 1495 } |
1398 else | 1496 else |
1399 #endif | |
1400 { | 1497 { |
1401 int saved_stack_pointer_delta; | 1498 int saved_stack_pointer_delta; |
1402 | 1499 |
1403 #ifndef STACK_GROWS_DOWNWARD | 1500 if (!STACK_GROWS_DOWNWARD) |
1404 emit_move_insn (target, virtual_stack_dynamic_rtx); | 1501 emit_move_insn (target, virtual_stack_dynamic_rtx); |
1405 #endif | |
1406 | 1502 |
1407 /* Check stack bounds if necessary. */ | 1503 /* Check stack bounds if necessary. */ |
1408 if (crtl->limit_stack) | 1504 if (crtl->limit_stack) |
1409 { | 1505 { |
1410 rtx available; | 1506 rtx available; |
1411 rtx space_available = gen_label_rtx (); | 1507 rtx_code_label *space_available = gen_label_rtx (); |
1412 #ifdef STACK_GROWS_DOWNWARD | 1508 if (STACK_GROWS_DOWNWARD) |
1413 available = expand_binop (Pmode, sub_optab, | 1509 available = expand_binop (Pmode, sub_optab, |
1414 stack_pointer_rtx, stack_limit_rtx, | 1510 stack_pointer_rtx, stack_limit_rtx, |
1415 NULL_RTX, 1, OPTAB_WIDEN); | 1511 NULL_RTX, 1, OPTAB_WIDEN); |
1416 #else | 1512 else |
1417 available = expand_binop (Pmode, sub_optab, | 1513 available = expand_binop (Pmode, sub_optab, |
1418 stack_limit_rtx, stack_pointer_rtx, | 1514 stack_limit_rtx, stack_pointer_rtx, |
1419 NULL_RTX, 1, OPTAB_WIDEN); | 1515 NULL_RTX, 1, OPTAB_WIDEN); |
1420 #endif | 1516 |
1421 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1, | 1517 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1, |
1422 space_available); | 1518 space_available); |
1423 #ifdef HAVE_trap | 1519 if (targetm.have_trap ()) |
1424 if (HAVE_trap) | 1520 emit_insn (targetm.gen_trap ()); |
1425 emit_insn (gen_trap ()); | |
1426 else | 1521 else |
1427 #endif | |
1428 error ("stack limits not supported on this target"); | 1522 error ("stack limits not supported on this target"); |
1429 emit_barrier (); | 1523 emit_barrier (); |
1430 emit_label (space_available); | 1524 emit_label (space_available); |
1431 } | 1525 } |
1432 | 1526 |
1433 saved_stack_pointer_delta = stack_pointer_delta; | 1527 saved_stack_pointer_delta = stack_pointer_delta; |
1528 | |
1434 if (flag_stack_check && STACK_CHECK_MOVING_SP) | 1529 if (flag_stack_check && STACK_CHECK_MOVING_SP) |
1435 anti_adjust_stack_and_probe (size, false); | 1530 anti_adjust_stack_and_probe (size, false); |
1531 else if (flag_stack_clash_protection) | |
1532 anti_adjust_stack_and_probe_stack_clash (size); | |
1436 else | 1533 else |
1437 anti_adjust_stack (size); | 1534 anti_adjust_stack (size); |
1535 | |
1438 /* Even if size is constant, don't modify stack_pointer_delta. | 1536 /* Even if size is constant, don't modify stack_pointer_delta. |
1439 The constant size alloca should preserve | 1537 The constant size alloca should preserve |
1440 crtl->preferred_stack_boundary alignment. */ | 1538 crtl->preferred_stack_boundary alignment. */ |
1441 stack_pointer_delta = saved_stack_pointer_delta; | 1539 stack_pointer_delta = saved_stack_pointer_delta; |
1442 | 1540 |
1443 #ifdef STACK_GROWS_DOWNWARD | 1541 if (STACK_GROWS_DOWNWARD) |
1444 emit_move_insn (target, virtual_stack_dynamic_rtx); | 1542 emit_move_insn (target, virtual_stack_dynamic_rtx); |
1445 #endif | 1543 } |
1446 } | 1544 |
1545 suppress_reg_args_size = false; | |
1447 | 1546 |
1448 /* Finish up the split stack handling. */ | 1547 /* Finish up the split stack handling. */ |
1449 if (final_label != NULL_RTX) | 1548 if (final_label != NULL_RTX) |
1450 { | 1549 { |
1451 gcc_assert (flag_split_stack); | 1550 gcc_assert (flag_split_stack); |
1452 emit_move_insn (final_target, target); | 1551 emit_move_insn (final_target, target); |
1453 emit_label (final_label); | 1552 emit_label (final_label); |
1454 target = final_target; | 1553 target = final_target; |
1455 } | 1554 } |
1456 | 1555 |
1457 if (must_align) | 1556 target = align_dynamic_address (target, required_align); |
1458 { | |
1459 /* CEIL_DIV_EXPR needs to worry about the addition overflowing, | |
1460 but we know it can't. So do the addition ourselves and then use |
1461 TRUNC_DIV_EXPR. */ | |
1462 target = expand_binop (Pmode, add_optab, target, | |
1463 GEN_INT (required_align / BITS_PER_UNIT - 1), | |
1464 NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1465 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target, | |
1466 GEN_INT (required_align / BITS_PER_UNIT), | |
1467 NULL_RTX, 1); | |
1468 target = expand_mult (Pmode, target, | |
1469 GEN_INT (required_align / BITS_PER_UNIT), | |
1470 NULL_RTX, 1); | |
1471 } | |
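The deleted sequence above rounds TARGET up to REQUIRED_ALIGN with an add, a truncating division, and a multiply, because a direct CEIL_DIV_EXPR would have to worry about overflow; GCC 7 folds this into align_dynamic_address. For a power-of-two alignment the add/div/mul form is equivalent to a single mask, which a quick standalone self-check confirms (our names, not GCC code):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uintptr_t align = 64;   /* required alignment in bytes, power of two */

  for (uintptr_t p = 0; p < 4096; p++)
    {
      uintptr_t via_div  = (p + align - 1) / align * align;  /* add, TRUNC_DIV, mult */
      uintptr_t via_mask = (p + align - 1) & ~(align - 1);   /* single AND */
      assert (via_div == via_mask);
    }
  return 0;
}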
1472 | 1557 |
1473 /* Now that we've committed to a return value, mark its alignment. */ | 1558 /* Now that we've committed to a return value, mark its alignment. */ |
1474 mark_reg_pointer (target, required_align); | 1559 mark_reg_pointer (target, required_align); |
1475 | 1560 |
1476 /* Record the new stack level for nonlocal gotos. */ | 1561 /* Record the new stack level. */ |
1477 if (cfun->nonlocal_goto_save_area != 0) | 1562 record_new_stack_level (); |
1478 update_nonlocal_goto_save_area (); | 1563 |
1564 return target; | |
1565 } | |
1566 | |
1567 /* Return an rtx representing the address of an area of memory already | |
1568 statically pushed onto the stack in the virtual stack vars area. (It is | |
1569 assumed that the area is allocated in the function prologue.) | |
1570 | |
1571 Any required stack pointer alignment is preserved. | |
1572 | |
1573 OFFSET is the offset of the area into the virtual stack vars area. | |
1574 | |
1575 REQUIRED_ALIGN is the alignment (in bits) required for the region | |
1576 of memory. */ | |
1577 | |
1578 rtx | |
1579 get_dynamic_stack_base (HOST_WIDE_INT offset, unsigned required_align) | |
1580 { | |
1581 rtx target; | |
1582 | |
1583 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1584 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1585 | |
1586 target = gen_reg_rtx (Pmode); | |
1587 emit_move_insn (target, virtual_stack_vars_rtx); | |
1588 target = expand_binop (Pmode, add_optab, target, | |
1589 gen_int_mode (offset, Pmode), | |
1590 NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1591 target = align_dynamic_address (target, required_align); | |
1592 | |
1593 /* Now that we've committed to a return value, mark its alignment. */ | |
1594 mark_reg_pointer (target, required_align); | |
1479 | 1595 |
1480 return target; | 1596 return target; |
1481 } | 1597 } |
1482 | 1598 |
1483 /* A front end may want to override GCC's stack checking by providing a | 1599 /* A front end may want to override GCC's stack checking by providing a |
1496 /* Emit one stack probe at ADDRESS, an address within the stack. */ | 1612 /* Emit one stack probe at ADDRESS, an address within the stack. */ |
1497 | 1613 |
1498 void | 1614 void |
1499 emit_stack_probe (rtx address) | 1615 emit_stack_probe (rtx address) |
1500 { | 1616 { |
1501 rtx memref = gen_rtx_MEM (word_mode, address); | 1617 if (targetm.have_probe_stack_address ()) |
1502 | 1618 emit_insn (targetm.gen_probe_stack_address (address)); |
1503 MEM_VOLATILE_P (memref) = 1; | |
1504 | |
1505 /* See if we have an insn to probe the stack. */ | |
1506 #ifdef HAVE_probe_stack | |
1507 if (HAVE_probe_stack) | |
1508 emit_insn (gen_probe_stack (memref)); | |
1509 else | 1619 else |
1510 #endif | 1620 { |
1511 emit_move_insn (memref, const0_rtx); | 1621 rtx memref = gen_rtx_MEM (word_mode, address); |
1622 | |
1623 MEM_VOLATILE_P (memref) = 1; | |
1624 | |
1625 /* See if we have an insn to probe the stack. */ | |
1626 if (targetm.have_probe_stack ()) | |
1627 emit_insn (targetm.gen_probe_stack (memref)); | |
1628 else | |
1629 emit_move_insn (memref, const0_rtx); | |
1630 } | |
1512 } | 1631 } |
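When the target provides neither probe_stack_address nor probe_stack, the fallback is a volatile store of zero through the address: the volatile MEM cannot be deleted or reordered away, so the page is guaranteed to be touched. A user-level analogue of that fallback (illustrative only; inside GCC this is RTL with MEM_VOLATILE_P set):

#include <stdlib.h>

/* Touch one byte at ADDRESS so the OS must have the page mapped; the
   volatile qualifier keeps the compiler from optimizing the store away.  */
static void
probe (char *address)
{
  *(volatile char *) address = 0;
}

int
main (void)
{
  char *block = malloc (4096);
  if (block)
    probe (block + 4095);   /* probe the far end of the region */
  free (block);
  return 0;
}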
1513 | 1632 |
1514 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive. | 1633 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive. |
1515 FIRST is a constant and size is a Pmode RTX. These are offsets from | 1634 FIRST is a constant and size is a Pmode RTX. These are offsets from |
1516 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add | 1635 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add |
1517 or subtract them from the stack pointer. */ | 1636 or subtract them from the stack pointer. */ |
1518 | 1637 |
1519 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP) | 1638 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP) |
1520 | 1639 |
1521 #ifdef STACK_GROWS_DOWNWARD | 1640 #if STACK_GROWS_DOWNWARD |
1522 #define STACK_GROW_OP MINUS | 1641 #define STACK_GROW_OP MINUS |
1523 #define STACK_GROW_OPTAB sub_optab | 1642 #define STACK_GROW_OPTAB sub_optab |
1524 #define STACK_GROW_OFF(off) -(off) | 1643 #define STACK_GROW_OFF(off) -(off) |
1525 #else | 1644 #else |
1526 #define STACK_GROW_OP PLUS | 1645 #define STACK_GROW_OP PLUS |
1539 if (stack_check_libfunc) | 1658 if (stack_check_libfunc) |
1540 { | 1659 { |
1541 rtx addr = memory_address (Pmode, | 1660 rtx addr = memory_address (Pmode, |
1542 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | 1661 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1543 stack_pointer_rtx, | 1662 stack_pointer_rtx, |
1544 plus_constant (size, first))); | 1663 plus_constant (Pmode, |
1545 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr, | 1664 size, first))); |
1546 Pmode); | 1665 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode, |
1666 addr, Pmode); | |
1547 } | 1667 } |
1548 | 1668 |
1549 /* Next see if we have an insn to check the stack. */ | 1669 /* Next see if we have an insn to check the stack. */ |
1550 #ifdef HAVE_check_stack | 1670 else if (targetm.have_check_stack ()) |
1551 else if (HAVE_check_stack) | 1671 { |
1552 { | 1672 struct expand_operand ops[1]; |
1553 rtx addr = memory_address (Pmode, | 1673 rtx addr = memory_address (Pmode, |
1554 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | 1674 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1555 stack_pointer_rtx, | 1675 stack_pointer_rtx, |
1556 plus_constant (size, first))); | 1676 plus_constant (Pmode, |
1557 insn_operand_predicate_fn pred | 1677 size, first))); |
1558 = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate; | 1678 bool success; |
1559 if (pred && !((*pred) (addr, Pmode))) | 1679 create_input_operand (&ops[0], addr, Pmode); |
1560 addr = copy_to_mode_reg (Pmode, addr); | 1680 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops); |
1561 | 1681 gcc_assert (success); |
1562 emit_insn (gen_check_stack (addr)); | 1682 } |
1563 } | |
1564 #endif | |
1565 | 1683 |
1566 /* Otherwise we have to generate explicit probes. If we have a constant | 1684 /* Otherwise we have to generate explicit probes. If we have a constant |
1567 small number of them to generate, that's the easy case. */ | 1685 small number of them to generate, that's the easy case. */ |
1568 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) | 1686 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) |
1569 { | 1687 { |
1574 it exceeds SIZE. If only one probe is needed, this will not | 1692 it exceeds SIZE. If only one probe is needed, this will not |
1575 generate any code. Then probe at FIRST + SIZE. */ | 1693 generate any code. Then probe at FIRST + SIZE. */ |
1576 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | 1694 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) |
1577 { | 1695 { |
1578 addr = memory_address (Pmode, | 1696 addr = memory_address (Pmode, |
1579 plus_constant (stack_pointer_rtx, | 1697 plus_constant (Pmode, stack_pointer_rtx, |
1580 STACK_GROW_OFF (first + i))); | 1698 STACK_GROW_OFF (first + i))); |
1581 emit_stack_probe (addr); | 1699 emit_stack_probe (addr); |
1582 } | 1700 } |
1583 | 1701 |
1584 addr = memory_address (Pmode, | 1702 addr = memory_address (Pmode, |
1585 plus_constant (stack_pointer_rtx, | 1703 plus_constant (Pmode, stack_pointer_rtx, |
1586 STACK_GROW_OFF (first + isize))); | 1704 STACK_GROW_OFF (first + isize))); |
1587 emit_stack_probe (addr); | 1705 emit_stack_probe (addr); |
1588 } | 1706 } |
1589 | 1707 |
1590 /* In the variable case, do the same as above, but in a loop. Note that we | 1708 /* In the variable case, do the same as above, but in a loop. Note that we |
1593 be able to handle this case properly; in particular, we use an equality | 1711 be able to handle this case properly; in particular, we use an equality |
1594 test for the loop condition. */ | 1712 test for the loop condition. */ |
1595 else | 1713 else |
1596 { | 1714 { |
1597 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp; | 1715 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp; |
1598 rtx loop_lab = gen_label_rtx (); | 1716 rtx_code_label *loop_lab = gen_label_rtx (); |
1599 rtx end_lab = gen_label_rtx (); | 1717 rtx_code_label *end_lab = gen_label_rtx (); |
1600 | |
1601 | 1718 |
1602 /* Step 1: round SIZE to the previous multiple of the interval. */ | 1719 /* Step 1: round SIZE to the previous multiple of the interval. */ |
1603 | 1720 |
1604 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | 1721 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ |
1605 rounded_size | 1722 rounded_size |
1606 = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL)); | 1723 = simplify_gen_binary (AND, Pmode, size, |
1724 gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
1607 rounded_size_op = force_operand (rounded_size, NULL_RTX); | 1725 rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1608 | 1726 |
1609 | 1727 |
1610 /* Step 2: compute initial and final value of the loop counter. */ | 1728 /* Step 2: compute initial and final value of the loop counter. */ |
1611 | 1729 |
1612 /* TEST_ADDR = SP + FIRST. */ | 1730 /* TEST_ADDR = SP + FIRST. */ |
1613 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | 1731 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1614 stack_pointer_rtx, | 1732 stack_pointer_rtx, |
1615 GEN_INT (first)), NULL_RTX); | 1733 gen_int_mode (first, Pmode)), |
1734 NULL_RTX); | |
1616 | 1735 |
1617 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */ | 1736 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */ |
1618 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | 1737 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1619 test_addr, | 1738 test_addr, |
1620 rounded_size_op), NULL_RTX); | 1739 rounded_size_op), NULL_RTX); |
1637 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1, | 1756 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1, |
1638 end_lab); | 1757 end_lab); |
1639 | 1758 |
1640 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */ | 1759 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */ |
1641 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr, | 1760 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr, |
1642 GEN_INT (PROBE_INTERVAL), test_addr, | 1761 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr, |
1643 1, OPTAB_WIDEN); | 1762 1, OPTAB_WIDEN); |
1644 | 1763 |
1645 gcc_assert (temp == test_addr); | 1764 gcc_assert (temp == test_addr); |
1646 | 1765 |
1647 /* Probe at TEST_ADDR. */ | 1766 /* Probe at TEST_ADDR. */ |
1664 if (CONST_INT_P (temp)) | 1783 if (CONST_INT_P (temp)) |
1665 { | 1784 { |
1666 /* Use [base + disp] addressing mode if supported. */ | 1785 /* Use [base + disp] addressing mode if supported. */
1667 HOST_WIDE_INT offset = INTVAL (temp); | 1786 HOST_WIDE_INT offset = INTVAL (temp); |
1668 addr = memory_address (Pmode, | 1787 addr = memory_address (Pmode, |
1669 plus_constant (last_addr, | 1788 plus_constant (Pmode, last_addr, |
1670 STACK_GROW_OFF (offset))); | 1789 STACK_GROW_OFF (offset))); |
1671 } | 1790 } |
1672 else | 1791 else |
1673 { | 1792 { |
1674 /* Manual CSE if the difference is not known at compile-time. */ | 1793 /* Manual CSE if the difference is not known at compile-time. */ |
1679 } | 1798 } |
1680 | 1799 |
1681 emit_stack_probe (addr); | 1800 emit_stack_probe (addr); |
1682 } | 1801 } |
1683 } | 1802 } |
1684 } | 1803 |
1804 /* Make sure nothing is scheduled before we are done. */ | |
1805 emit_insn (gen_blockage ()); | |
1806 } | |
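The variable-size path of probe_stack_range thus rounds SIZE down to a multiple of PROBE_INTERVAL, probes once per interval using an equality test as the loop exit (safe against address wraparound, unlike an ordering test), and finishes with one residual probe at FIRST + SIZE. A runnable sketch of that control flow, assuming the common 4096-byte interval and printing offsets instead of emitting RTL:

#include <stdio.h>

#define PROBE_INTERVAL 4096UL   /* assumed: 1 << STACK_CHECK_PROBE_INTERVAL_EXP */

static void
probe (unsigned long offset)
{
  printf ("probe at sp%+ld\n", -(long) offset);   /* stack grows downward here */
}

/* Probe every PROBE_INTERVAL bytes in [FIRST, FIRST + SIZE].  */
static void
probe_range (unsigned long first, unsigned long size)
{
  unsigned long rounded = size & -PROBE_INTERVAL;
  unsigned long test = first;
  const unsigned long last = first + rounded;

  while (test != last)          /* EQ exit, exactly as in the emitted loop */
    {
      test += PROBE_INTERVAL;
      probe (test);
    }
  if (size != rounded)          /* residual: one probe at FIRST + SIZE */
    probe (first + size);
}

int
main (void)
{
  probe_range (0, 10000);       /* probes at 4096, 8192 and 10000 */
  return 0;
}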
1807 | |
1808 /* Compute parameters for stack clash probing a dynamic stack | |
1809 allocation of SIZE bytes. | |
1810 | |
1811 We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL. | |
1812 | |
1813 Additionally we conditionally dump the type of probing that will | |
1814 be needed given the values computed. */ | |
1815 | |
1816 void | |
1817 compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr, | |
1818 rtx *residual, | |
1819 HOST_WIDE_INT *probe_interval, | |
1820 rtx size) | |
1821 { | |
1822 /* Round SIZE down to a multiple of STACK_CLASH_PROTECTION_PROBE_INTERVAL. */ |
1823 *probe_interval | |
1824 = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL); | |
1825 *rounded_size = simplify_gen_binary (AND, Pmode, size, | |
1826 GEN_INT (-*probe_interval)); | |
1827 | |
1828 /* Compute the value of the stack pointer for the last iteration. | |
1829 It's just SP + ROUNDED_SIZE. */ | |
1830 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX); | |
1831 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1832 stack_pointer_rtx, | |
1833 rounded_size_op), | |
1834 NULL_RTX); | |
1835 | |
1836 /* Compute any residuals not allocated by the loop above. Residuals | |
1837 are just SIZE - ROUNDED_SIZE. */ |
1838 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size); | |
1839 | |
1840 /* Dump key information to make writing tests easy. */ | |
1841 if (dump_file) | |
1842 { | |
1843 if (*rounded_size == CONST0_RTX (Pmode)) | |
1844 fprintf (dump_file, | |
1845 "Stack clash skipped dynamic allocation and probing loop.\n"); | |
1846 else if (CONST_INT_P (*rounded_size) | |
1847 && INTVAL (*rounded_size) <= 4 * *probe_interval) | |
1848 fprintf (dump_file, | |
1849 "Stack clash dynamic allocation and probing inline.\n"); | |
1850 else if (CONST_INT_P (*rounded_size)) | |
1851 fprintf (dump_file, | |
1852 "Stack clash dynamic allocation and probing in " | |
1853 "rotated loop.\n"); | |
1854 else | |
1855 fprintf (dump_file, | |
1856 "Stack clash dynamic allocation and probing in loop.\n"); | |
1857 | |
1858 if (*residual != CONST0_RTX (Pmode)) | |
1859 fprintf (dump_file, | |
1860 "Stack clash dynamic allocation and probing residuals.\n"); | |
1861 else | |
1862 fprintf (dump_file, | |
1863 "Stack clash skipped dynamic allocation and " | |
1864 "probing residuals.\n"); | |
1865 } | |
1866 } | |
1867 | |
1868 /* Emit the start of an allocate/probe loop for stack | |
1869 clash protection. | |
1870 | |
1871 LOOP_LAB and END_LAB are returned for use when we emit the | |
1872 end of the loop. | |
1873 | |
1874 LAST_ADDR is the value of SP that stops the loop. */ |
1875 void | |
1876 emit_stack_clash_protection_probe_loop_start (rtx *loop_lab, | |
1877 rtx *end_lab, | |
1878 rtx last_addr, | |
1879 bool rotated) | |
1880 { | |
1881 /* Essentially we want to emit any setup code, the top of loop | |
1882 label and the comparison at the top of the loop. */ | |
1883 *loop_lab = gen_label_rtx (); | |
1884 *end_lab = gen_label_rtx (); | |
1885 | |
1886 emit_label (*loop_lab); | |
1887 if (!rotated) | |
1888 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX, | |
1889 Pmode, 1, *end_lab); | |
1890 } | |
1891 | |
1892 /* Emit the end of a stack clash probing loop. | |
1893 | |
1894 This consists of just the jump back to LOOP_LAB and | |
1895 emitting END_LOOP after the loop. */ | |
1896 | |
1897 void | |
1898 emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop, | |
1899 rtx last_addr, bool rotated) | |
1900 { | |
1901 if (rotated) | |
1902 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX, | |
1903 Pmode, 1, loop_lab); | |
1904 else | |
1905 emit_jump (loop_lab); | |
1906 | |
1907 emit_label (end_loop); | |
1908 | |
1909 } | |
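Rotation here means moving the exit test from the top of the loop to the bottom: when ROUNDED_SIZE is a compile-time constant (and hence known nonzero on this path), the do-while shape needs only the NE back-edge, saving the top-of-loop compare-and-branch. The two shapes side by side, as a runnable sketch with toy values:

#include <stdio.h>

static void
probe (unsigned long sp)
{
  printf ("probe at %lu\n", sp);
}

int
main (void)
{
  const unsigned long interval = 4096, last_addr = 0;

  /* Non-rotated: compare at the top, exit on equality (EQ).  */
  for (unsigned long p = 3 * interval; p != last_addr; )
    {
      p -= interval;
      probe (p);
    }

  /* Rotated, valid when the trip count is known nonzero: the only
     branch is the NE back-edge at the bottom.  */
  unsigned long sp = 3 * interval;
  do
    {
      sp -= interval;
      probe (sp);
    }
  while (sp != last_addr);

  return 0;
}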
1910 | |
1911 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) | |
1912 while probing it. This pushes when SIZE is positive. SIZE need not | |
1913 be constant. | |
1914 | |
1915 This is subtly different from anti_adjust_stack_and_probe, in order |
1916 to prevent stack-clash attacks: |
1917 | |
1918 1. It must assume no knowledge of the probing state; any allocation |
1919 must probe. | |
1920 | |
1921 Consider the case of a 1 byte alloca in a loop. If the sum of the | |
1922 allocations is large, then this could be used to jump the guard if | |
1923 probes were not emitted. | |
1924 | |
1925 2. It never skips probes, whereas anti_adjust_stack_and_probe will | |
1926 skip probes on the first couple of PROBE_INTERVALs on the assumption |
1927 they're done elsewhere. | |
1928 | |
1929 3. It only allocates and probes SIZE bytes; it does not need to |
1930 allocate/probe beyond that because this probing style does not | |
1931 guarantee signal handling capability if the guard is hit. */ | |
1932 | |
1933 static void | |
1934 anti_adjust_stack_and_probe_stack_clash (rtx size) | |
1935 { | |
1936 /* First ensure SIZE is Pmode. */ | |
1937 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1938 size = convert_to_mode (Pmode, size, 1); | |
1939 | |
1940 /* We can get here with a constant size on some targets. */ | |
1941 rtx rounded_size, last_addr, residual; | |
1942 HOST_WIDE_INT probe_interval; | |
1943 compute_stack_clash_protection_loop_data (&rounded_size, &last_addr, | |
1944 &residual, &probe_interval, size); | |
1945 | |
1946 if (rounded_size != CONST0_RTX (Pmode)) | |
1947 { | |
1948 if (CONST_INT_P (rounded_size) | |
1949 && INTVAL (rounded_size) <= 4 * probe_interval) | |
1950 { | |
1951 for (HOST_WIDE_INT i = 0; | |
1952 i < INTVAL (rounded_size); | |
1953 i += probe_interval) | |
1954 { | |
1955 anti_adjust_stack (GEN_INT (probe_interval)); | |
1956 | |
1957 /* The prologue does not probe residuals. Thus the offset | |
1958 here is to probe just beyond what the prologue had already |
1959 allocated. */ | |
1960 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx, | |
1961 (probe_interval | |
1962 - GET_MODE_SIZE (word_mode)))); | |
1963 emit_insn (gen_blockage ()); | |
1964 } | |
1965 } | |
1966 else | |
1967 { | |
1968 rtx loop_lab, end_loop; | |
1969 bool rotate_loop = CONST_INT_P (rounded_size); | |
1970 emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop, | |
1971 last_addr, rotate_loop); | |
1972 | |
1973 anti_adjust_stack (GEN_INT (probe_interval)); | |
1974 | |
1975 /* The prologue does not probe residuals. Thus the offset here | |
1976 is to probe just beyond what the prologue had already allocated. */ |
1977 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx, | |
1978 (probe_interval | |
1979 - GET_MODE_SIZE (word_mode)))); | |
1980 | |
1981 emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop, | |
1982 last_addr, rotate_loop); | |
1983 emit_insn (gen_blockage ()); | |
1984 } | |
1985 } | |
1986 | |
1987 if (residual != CONST0_RTX (Pmode)) | |
1988 { | |
1989 rtx x = force_reg (Pmode, plus_constant (Pmode, residual, | |
1990 -GET_MODE_SIZE (word_mode))); | |
1991 anti_adjust_stack (residual); | |
1992 emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x)); | |
1993 emit_insn (gen_blockage ()); | |
1994 } | |
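The residual path computes X = RESIDUAL minus the word size before the stack adjustment, so the probe at SP + X lands on the last word of the freshly allocated residual rather than beyond it. With toy numbers from a 10000-byte request (4096-byte interval, 8-byte word, our arithmetic only):

#include <stdio.h>

int
main (void)
{
  unsigned long sp = 1ul << 20;          /* toy stack pointer; stack grows downward */
  const unsigned long residual = 1808;   /* 10000 - 8192 */
  const unsigned long word = 8;          /* GET_MODE_SIZE (word_mode), assumed */

  unsigned long x = residual - word;     /* offset of the residual's last word */
  sp -= residual;                        /* anti_adjust_stack (residual) */

  /* sp + x == old_sp - word: the last word of the new allocation.  */
  printf ("probe at %lu (last word of the %lu-byte residual)\n", sp + x, residual);
  return 0;
}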
1995 | |
1996 /* Some targets make optimistic assumptions in their prologues about | |
1997 how the caller may have probed the stack. Make sure we honor | |
1998 those assumptions when needed. */ | |
1999 if (size != CONST0_RTX (Pmode) | |
2000 && targetm.stack_clash_protection_final_dynamic_probe (residual)) | |
2001 { | |
2002 /* Ideally we would just probe at *sp. However, if SIZE is not | |
2003 a compile-time constant, but is zero at runtime, then *sp | |
2004 might hold live data. So probe at *sp if we know that | |
2005 an allocation was made; otherwise probe into the red zone, |
2006 which is obviously undesirable. */ |
2007 if (CONST_INT_P (size)) | |
2008 { | |
2009 emit_stack_probe (stack_pointer_rtx); | |
2010 emit_insn (gen_blockage ()); | |
2011 } | |
2012 else | |
2013 { | |
2014 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx, | |
2015 -GET_MODE_SIZE (word_mode))); | |
2016 emit_insn (gen_blockage ()); | |
2017 } | |
2018 } | |
2019 } | |
2020 | |
1685 | 2021 |
1686 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) | 2022 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) |
1687 while probing it. This pushes when SIZE is positive. SIZE need not | 2023 while probing it. This pushes when SIZE is positive. SIZE need not |
1688 be constant. If ADJUST_BACK is true, adjust back the stack pointer | 2024 be constant. If ADJUST_BACK is true, adjust back the stack pointer |
1689 by plus SIZE at the end. */ | 2025 by plus SIZE at the end. */ |
1722 anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | 2058 anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); |
1723 emit_stack_probe (stack_pointer_rtx); | 2059 emit_stack_probe (stack_pointer_rtx); |
1724 } | 2060 } |
1725 | 2061 |
1726 if (first_probe) | 2062 if (first_probe) |
1727 anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope)); | 2063 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
1728 else | 2064 else |
1729 anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i)); | 2065 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i)); |
1730 emit_stack_probe (stack_pointer_rtx); | 2066 emit_stack_probe (stack_pointer_rtx); |
1731 } | 2067 } |
1732 | 2068 |
1733 /* In the variable case, do the same as above, but in a loop. Note that we | 2069 /* In the variable case, do the same as above, but in a loop. Note that we |
1734 must be extra careful with variables wrapping around because we might be | 2070 must be extra careful with variables wrapping around because we might be |
1736 be able to handle this case properly; in particular, we use an equality | 2072 be able to handle this case properly; in particular, we use an equality |
1737 test for the loop condition. */ | 2073 test for the loop condition. */ |
1738 else | 2074 else |
1739 { | 2075 { |
1740 rtx rounded_size, rounded_size_op, last_addr, temp; | 2076 rtx rounded_size, rounded_size_op, last_addr, temp; |
1741 rtx loop_lab = gen_label_rtx (); | 2077 rtx_code_label *loop_lab = gen_label_rtx (); |
1742 rtx end_lab = gen_label_rtx (); | 2078 rtx_code_label *end_lab = gen_label_rtx (); |
1743 | 2079 |
1744 | 2080 |
1745 /* Step 1: round SIZE to the previous multiple of the interval. */ | 2081 /* Step 1: round SIZE to the previous multiple of the interval. */ |
1746 | 2082 |
1747 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | 2083 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ |
1748 rounded_size | 2084 rounded_size |
1749 = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL)); | 2085 = simplify_gen_binary (AND, Pmode, size, |
2086 gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
1750 rounded_size_op = force_operand (rounded_size, NULL_RTX); | 2087 rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1751 | 2088 |
1752 | 2089 |
1753 /* Step 2: compute initial and final value of the loop counter. */ | 2090 /* Step 2: compute initial and final value of the loop counter. */ |
1754 | 2091 |
1802 } | 2139 } |
1803 } | 2140 } |
1804 | 2141 |
1805 /* Adjust back and account for the additional first interval. */ | 2142 /* Adjust back and account for the additional first interval. */ |
1806 if (adjust_back) | 2143 if (adjust_back) |
1807 adjust_stack (plus_constant (size, PROBE_INTERVAL + dope)); | 2144 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
1808 else | 2145 else |
1809 adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | 2146 adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); |
1810 } | 2147 } |
1811 | 2148 |
1812 /* Return an rtx representing the register or memory location | 2149 /* Return an rtx representing the register or memory location |
1828 | 2165 |
1829 if (REG_P (val) | 2166 if (REG_P (val) |
1830 && GET_MODE (val) == BLKmode) | 2167 && GET_MODE (val) == BLKmode) |
1831 { | 2168 { |
1832 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype); | 2169 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype); |
1833 enum machine_mode tmpmode; | 2170 opt_scalar_int_mode tmpmode; |
1834 | 2171 |
1835 /* int_size_in_bytes can return -1. We don't need a check here | 2172 /* int_size_in_bytes can return -1. We don't need a check here |
1836 since the value of bytes will then be large enough that no | 2173 since the value of bytes will then be large enough that no |
1837 mode will match anyway. */ | 2174 mode will match anyway. */ |
1838 | 2175 |
1839 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | 2176 FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT) |
1840 tmpmode != VOIDmode; | |
1841 tmpmode = GET_MODE_WIDER_MODE (tmpmode)) | |
1842 { | 2177 { |
1843 /* Have we found a large enough mode? */ | 2178 /* Have we found a large enough mode? */ |
1844 if (GET_MODE_SIZE (tmpmode) >= bytes) | 2179 if (GET_MODE_SIZE (tmpmode.require ()) >= bytes) |
1845 break; | 2180 break; |
1846 } | 2181 } |
1847 | 2182 |
1848 /* No suitable mode found. */ | 2183 PUT_MODE (val, tmpmode.require ()); |
1849 gcc_assert (tmpmode != VOIDmode); | |
1850 | |
1851 PUT_MODE (val, tmpmode); | |
1852 } | 2184 } |
1853 return val; | 2185 return val; |
1854 } | 2186 } |
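The FOR_EACH_MODE_IN_CLASS walk above visits the integer modes from narrowest to widest and stops at the first one wide enough to hold the BLKmode return value; tmpmode.require () then asserts that such a mode was found. A user-level analogue over power-of-two byte widths (QI/HI/SI/DI/TI-sized on a typical 64-bit host; illustrative only, not GCC API):

#include <assert.h>
#include <stdio.h>

/* Return the narrowest width in bytes (<= 16) that covers N, or 0 if none,
   mirroring the narrowest-to-widest mode walk.  */
static unsigned
smallest_covering_width (unsigned n)
{
  for (unsigned w = 1; w <= 16; w <<= 1)
    if (w >= n)
      return w;
  return 0;   /* caller must treat this like a failed require () */
}

int
main (void)
{
  assert (smallest_covering_width (3) == 4);      /* SImode-sized */
  assert (smallest_covering_width (8) == 8);      /* DImode-sized */
  printf ("%u\n", smallest_covering_width (12));  /* 16: TImode-sized */
  return 0;
}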
1855 | 2187 |
1856 /* Return an rtx representing the register or memory location | 2188 /* Return an rtx representing the register or memory location |
1857 in which a scalar value of mode MODE was returned by a library call. */ | 2189 in which a scalar value of mode MODE was returned by a library call. */ |
1858 | 2190 |
1859 rtx | 2191 rtx |
1860 hard_libcall_value (enum machine_mode mode, rtx fun) | 2192 hard_libcall_value (machine_mode mode, rtx fun) |
1861 { | 2193 { |
1862 return targetm.calls.libcall_value (mode, fun); | 2194 return targetm.calls.libcall_value (mode, fun); |
1863 } | 2195 } |
1864 | 2196 |
1865 /* Look up the tree code for a given rtx code | 2197 /* Look up the tree code for a given rtx code |
1866 to provide the arithmetic operation for REAL_ARITHMETIC. | 2198 to provide the arithmetic operation for real_arithmetic. |
1867 The function returns an int because the caller may not know | 2199 The function returns an int because the caller may not know |
1868 what `enum tree_code' means. */ | 2200 what `enum tree_code' means. */ |
1869 | 2201 |
1870 int | 2202 int |
1871 rtx_to_tree_code (enum rtx_code code) | 2203 rtx_to_tree_code (enum rtx_code code) |