comparison gcc/explow.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
comparing 111:04ced10e8804 with 131:84e7813d76e9
@@ -1 +1 @@
 /* Subroutines for manipulating rtx's in semantically interesting ways.
-   Copyright (C) 1987-2017 Free Software Foundation, Inc.
+   Copyright (C) 1987-2018 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -54 +54 @@
   /* Not scalar_int_mode because we also allow pointer bound modes.  */
   scalar_mode smode = as_a <scalar_mode> (mode);
   int width = GET_MODE_PRECISION (smode);
 
   /* You want to truncate to a _what_?  */
-  gcc_assert (SCALAR_INT_MODE_P (mode)
-              || POINTER_BOUNDS_MODE_P (mode));
+  gcc_assert (SCALAR_INT_MODE_P (mode));
 
   /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
   if (smode == BImode)
     return c & 1 ? STORE_FLAG_VALUE : 0;
 
@@ -75 +74 @@
     }
 
   return c;
 }
 
+/* Likewise for polynomial values, using the sign-extended representation
+   for each individual coefficient.  */
+
+poly_int64
+trunc_int_for_mode (poly_int64 x, machine_mode mode)
+{
+  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+    x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
+  return x;
+}
+
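Note on the hunk above: the new overload just reapplies the scalar trunc_int_for_mode to each coefficient of the polynomial value. A minimal standalone sketch of the idea, where the two-coefficient struct and the masking arithmetic are illustrative stand-ins rather than the real poly-int.h types:

#include <stdio.h>

/* Illustrative stand-in for a two-coefficient poly_int64; the real
   type lives in gcc/poly-int.h.  */
struct poly2 { long coeffs[2]; };

/* Sign-extend C from its low WIDTH bits, mirroring what the scalar
   trunc_int_for_mode does for a WIDTH-bit mode.  */
static long trunc_for_width (long c, int width)
{
  long sign_bit = 1L << (width - 1);
  c &= (sign_bit << 1) - 1;          /* keep the low WIDTH bits */
  return (c ^ sign_bit) - sign_bit;  /* sign-extend bit WIDTH-1 */
}

/* The polynomial variant simply truncates coefficient by coefficient.  */
static struct poly2 trunc_poly_for_width (struct poly2 x, int width)
{
  for (int i = 0; i < 2; i++)
    x.coeffs[i] = trunc_for_width (x.coeffs[i], width);
  return x;
}

int main (void)
{
  struct poly2 x = { { 0x1ff, -0x101 } };
  struct poly2 t = trunc_poly_for_width (x, 8);    /* a QImode-like width */
  printf ("%ld %ld\n", t.coeffs[0], t.coeffs[1]);  /* prints: -1 -1 */
  return 0;
}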
@@ -80 +90 @@
 /* Return an rtx for the sum of X and the integer C, given that X has
    mode MODE.  INPLACE is true if X can be modified inplace or false
    if it must be treated as immutable.  */
 
 rtx
-plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
-               bool inplace)
+plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
 {
   RTX_CODE code;
   rtx y;
   rtx tem;
   int all_constant = 0;
 
   gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
 
-  if (c == 0)
+  if (known_eq (c, 0))
     return x;
 
  restart:
 
   code = GET_CODE (x);
@@ -178 +187 @@
           c = 0;
         }
       break;
 
     default:
+      if (CONST_POLY_INT_P (x))
+        return immed_wide_int_const (const_poly_int_value (x) + c, mode);
       break;
     }
 
-  if (c != 0)
+  if (maybe_ne (c, 0))
     x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
 
   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
     return x;
   else if (all_constant)
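Aside on the predicates used above: once C is a poly_int64 (a value of the form a + b*x, with x only known at runtime), plain ==/!= against 0 are no longer decidable in general, so the patch distinguishes "known equal to 0" from "maybe not equal to 0". A standalone model of that distinction; poly2 is a hypothetical stand-in, not GCC's template:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical value a + b*x, where x >= 0 is only known at runtime.  */
struct poly2 { long a, b; };

/* known_eq (p, 0): zero for every possible x.  */
static bool known_eq0 (struct poly2 p)  { return p.a == 0 && p.b == 0; }

/* maybe_ne (p, 0): nonzero for at least one possible x.  */
static bool maybe_ne0 (struct poly2 p)  { return p.a != 0 || p.b != 0; }

int main (void)
{
  struct poly2 zero = { 0, 0 }, runtime = { 0, 4 };
  printf ("%d %d\n", known_eq0 (zero), known_eq0 (runtime));  /* 1 0 */
  printf ("%d %d\n", maybe_ne0 (zero), maybe_ne0 (runtime));  /* 0 1 */
  return 0;
}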
@@ -208 +219 @@
   if (GET_CODE (x) != PLUS)
     return x;
 
   /* First handle constants appearing at this level explicitly.  */
   if (CONST_INT_P (XEXP (x, 1))
-      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
-                                                XEXP (x, 1)))
+      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
+                                           XEXP (x, 1))) != 0
       && CONST_INT_P (tem))
     {
       *constptr = tem;
       return eliminate_constant_term (XEXP (x, 0), constptr);
     }
 
   tem = const0_rtx;
   x0 = eliminate_constant_term (XEXP (x, 0), &tem);
   x1 = eliminate_constant_term (XEXP (x, 1), &tem);
   if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
-      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
-                                                *constptr, tem))
+      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
+                                           *constptr, tem)) != 0
       && CONST_INT_P (tem))
     {
       *constptr = tem;
       return gen_rtx_PLUS (GET_MODE (x), x0, x1);
     }
@@ -927 +938 @@
       temp = single_set (insn);
       gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
     }
 
   if (!suppress_reg_args_size)
-    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
+    add_args_size_note (insn, stack_pointer_delta);
 }
 
 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
    This pops when ADJUST is positive.  ADJUST need not be constant.  */
 
@@ -941 +952 @@
   if (adjust == const0_rtx)
     return;
 
   /* We expect all variable sized adjustments to be a multiple of
      PREFERRED_STACK_BOUNDARY.  */
-  if (CONST_INT_P (adjust))
-    stack_pointer_delta -= INTVAL (adjust);
+  poly_int64 const_adjust;
+  if (poly_int_rtx_p (adjust, &const_adjust))
+    stack_pointer_delta -= const_adjust;
 
   adjust_stack_1 (adjust, false);
 }
 
 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
@@ -958 +970 @@
   if (adjust == const0_rtx)
     return;
 
   /* We expect all variable sized adjustments to be a multiple of
      PREFERRED_STACK_BOUNDARY.  */
-  if (CONST_INT_P (adjust))
-    stack_pointer_delta += INTVAL (adjust);
+  poly_int64 const_adjust;
+  if (poly_int_rtx_p (adjust, &const_adjust))
+    stack_pointer_delta += const_adjust;
 
   adjust_stack_1 (adjust, true);
 }
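Note on the two hunks above: stack_pointer_delta is now tracked as a poly_int64, and the adjustment is recognized with poly_int_rtx_p, which accepts both CONST_INT and CONST_POLY_INT rtxes and hands the value back through an out-parameter. A rough standalone model of that accessor pattern; the types here are invented for illustration:

#include <stdbool.h>
#include <stdio.h>

/* Invented stand-in for an rtx that may or may not wrap a
   compile-time (possibly polynomial) constant.  */
struct rtx_model { bool is_const; long value; };

/* Model of poly_int_rtx_p: return true and store the value through
   RES only when X really is a constant.  */
static bool const_rtx_value (struct rtx_model x, long *res)
{
  if (!x.is_const)
    return false;
  *res = x.value;
  return true;
}

int main (void)
{
  long stack_pointer_delta = 128, const_adjust;
  struct rtx_model adjust = { true, 16 };

  /* Mirrors: if (poly_int_rtx_p (adjust, &const_adjust))
                stack_pointer_delta -= const_adjust;  */
  if (const_rtx_value (adjust, &const_adjust))
    stack_pointer_delta -= const_adjust;

  printf ("%ld\n", stack_pointer_delta);  /* prints: 112 */
  return 0;
}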
@@ -968 +981 @@
 
 /* Round the size of a block to be pushed up to the boundary required
@@ -1204 +1217 @@
 void
 get_dynamic_stack_size (rtx *psize, unsigned size_align,
                         unsigned required_align,
                         HOST_WIDE_INT *pstack_usage_size)
 {
-  unsigned extra = 0;
   rtx size = *psize;
 
   /* Ensure the size is in the proper mode.  */
   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
     size = convert_to_mode (Pmode, size, 1);
@@ -1240 +1252 @@
    know the final value of the STACK_DYNAMIC_OFFSET used in function.c
    (it might depend on the size of the outgoing parameter lists, for
    example), so we must preventively align the value.  We leave space
    in SIZE for the hole that might result from the alignment operation.  */
 
-  /* Since the stack is presumed to be aligned before this allocation,
-     we only need to increase the size of the allocation if the required
-     alignment is more than the stack alignment.  */
-  if (required_align > STACK_BOUNDARY)
+  unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
+  if (known_align == 0)
+    known_align = BITS_PER_UNIT;
+  if (required_align > known_align)
     {
-      extra = (required_align - STACK_BOUNDARY) / BITS_PER_UNIT;
+      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
       size = plus_constant (Pmode, size, extra);
       size = force_operand (size, NULL_RTX);
-      if (size_align > STACK_BOUNDARY)
-        size_align = STACK_BOUNDARY;
+      if (size_align > known_align)
+        size_align = known_align;
 
       if (flag_stack_usage_info && pstack_usage_size)
         *pstack_usage_size += extra;
     }
 
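Worked example for the hunk above, with invented numbers: the padding added to SIZE is now derived from the alignment actually recorded for the virtual stack pointer rather than from STACK_BOUNDARY.

#include <stdio.h>

#define BITS_PER_UNIT 8

int main (void)
{
  /* Invented example: the dynamic area is only known to be 64-bit
     aligned, but the caller requires 256-bit alignment.  */
  unsigned known_align = 64, required_align = 256;

  if (required_align > known_align)
    {
      /* Worst-case padding so that some address inside the block can
         be aligned up to REQUIRED_ALIGN.  */
      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
      printf ("extra = %u bytes\n", extra);  /* prints: extra = 24 bytes */
    }
  return 0;
}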
@@ -1462 +1474 @@
       emit_label (available_label);
     }
 
   /* We ought to be called always on the toplevel and stack ought to be aligned
      properly.  */
-  gcc_assert (!(stack_pointer_delta
-                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
+  gcc_assert (multiple_p (stack_pointer_delta,
+                          PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
 
   /* If needed, check that we have the required amount of stack.  Take into
      account what has already been checked.  */
   if (STACK_CHECK_MOVING_SP)
     ;
@@ -1493 +1505 @@
       create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
       expand_insn (targetm.code_for_allocate_stack, 2, ops);
     }
   else
     {
-      int saved_stack_pointer_delta;
+      poly_int64 saved_stack_pointer_delta;
 
       if (!STACK_GROWS_DOWNWARD)
         emit_move_insn (target, virtual_stack_dynamic_rtx);
 
       /* Check stack bounds if necessary.  */
@@ -1574 +1586 @@
 
    REQUIRED_ALIGN is the alignment (in bits) required for the region
    of memory.  */
 
 rtx
-get_dynamic_stack_base (HOST_WIDE_INT offset, unsigned required_align)
+get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
 {
   rtx target;
 
   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
@@ -1613 +1625 @@
 
 void
 emit_stack_probe (rtx address)
 {
   if (targetm.have_probe_stack_address ())
-    emit_insn (targetm.gen_probe_stack_address (address));
+    {
+      struct expand_operand ops[1];
+      insn_code icode = targetm.code_for_probe_stack_address;
+      create_address_operand (ops, address);
+      maybe_legitimize_operands (icode, 0, 1, ops);
+      expand_insn (icode, 1, ops);
+    }
   else
     {
       rtx memref = gen_rtx_MEM (word_mode, address);
 
       MEM_VOLATILE_P (memref) = 1;
+      memref = validize_mem (memref);
 
       /* See if we have an insn to probe the stack.  */
       if (targetm.have_probe_stack ())
         emit_insn (targetm.gen_probe_stack (memref));
       else
         emit_move_insn (memref, const0_rtx);
     }
 }
 
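Note on the emit_stack_probe hunk above: instead of feeding ADDRESS straight into the target pattern, the new code builds an operand with create_address_operand and runs it through maybe_legitimize_operands, so an address that does not satisfy the pattern's predicate is first forced into a legitimate form; the MEM fallback gets the same treatment via validize_mem. A rough standalone model of "legitimize, then expand" (everything here is invented for illustration):

#include <stdio.h>

/* Invented model: an operand is "legitimate" for our fake probe
   pattern only if it is 8-byte aligned, so we force alignment
   instead of rejecting the operand.  */
static long legitimize (long addr)
{
  return addr & ~7L;
}

static void expand_probe (long addr)
{
  printf ("probe at %ld\n", addr);
}

int main (void)
{
  long address = 1005;
  /* Mirrors create_address_operand + maybe_legitimize_operands:
     massage the operand first, then expand the insn.  */
  expand_probe (legitimize (address));  /* prints: probe at 1000 */
  return 0;
}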
@@ -1633 +1652 @@
 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
    FIRST is a constant and size is a Pmode RTX.  These are offsets from
@@ -1937 +1956 @@
   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
     size = convert_to_mode (Pmode, size, 1);
 
   /* We can get here with a constant size on some targets.  */
   rtx rounded_size, last_addr, residual;
-  HOST_WIDE_INT probe_interval;
+  HOST_WIDE_INT probe_interval, probe_range;
+  bool target_probe_range_p = false;
   compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
                                             &residual, &probe_interval, size);
+
+  /* Get the back-end specific probe ranges.  */
+  probe_range = targetm.stack_clash_protection_alloca_probe_range ();
+  target_probe_range_p = probe_range != 0;
+  gcc_assert (probe_range >= 0);
+
+  /* If no back-end specific range defined, default to the top of the newly
+     allocated range.  */
+  if (probe_range == 0)
+    probe_range = probe_interval - GET_MODE_SIZE (word_mode);
 
   if (rounded_size != CONST0_RTX (Pmode))
     {
       if (CONST_INT_P (rounded_size)
           && INTVAL (rounded_size) <= 4 * probe_interval)
@@ -1951 +1981 @@
           for (HOST_WIDE_INT i = 0;
                i < INTVAL (rounded_size);
                i += probe_interval)
             {
               anti_adjust_stack (GEN_INT (probe_interval));
-
               /* The prologue does not probe residuals.  Thus the offset
                  here to probe just beyond what the prologue had already
                  allocated.  */
               emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
-                                               (probe_interval
-                                                - GET_MODE_SIZE (word_mode))));
+                                               probe_range));
+
               emit_insn (gen_blockage ());
             }
         }
       else
         {
@@ -1971 +2000 @@
                                                       last_addr, rotate_loop);
 
           anti_adjust_stack (GEN_INT (probe_interval));
 
           /* The prologue does not probe residuals.  Thus the offset here
-             to probe just beyond what the prologue had already allocated.  */
+             to probe just beyond what the prologue had already
+             allocated.  */
           emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
-                                           (probe_interval
-                                            - GET_MODE_SIZE (word_mode))));
+                                           probe_range));
 
           emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
                                                       last_addr, rotate_loop);
           emit_insn (gen_blockage ());
         }
     }
 
   if (residual != CONST0_RTX (Pmode))
     {
-      rtx x = force_reg (Pmode, plus_constant (Pmode, residual,
-                                               -GET_MODE_SIZE (word_mode)));
+      rtx label = NULL_RTX;
+      /* RESIDUAL could be zero at runtime and in that case *sp could
+         hold live data.  Furthermore, we do not want to probe into the
+         red zone.
+
+         If TARGET_PROBE_RANGE_P then the target has promised it's safe to
+         probe at offset 0.  In which case we no longer have to check for
+         RESIDUAL == 0.  However we still need to probe at the right offset
+         when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
+
+         If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
+         on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time
+         constant.  */
       anti_adjust_stack (residual);
-      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
-      emit_insn (gen_blockage ());
-    }
-
-  /* Some targets make optimistic assumptions in their prologues about
-     how the caller may have probed the stack.  Make sure we honor
-     those assumptions when needed.  */
-  if (size != CONST0_RTX (Pmode)
-      && targetm.stack_clash_protection_final_dynamic_probe (residual))
-    {
-      /* Ideally we would just probe at *sp.  However, if SIZE is not
-         a compile-time constant, but is zero at runtime, then *sp
-         might hold live data.  So probe at *sp if we know that
-         an allocation was made, otherwise probe into the red zone
-         which is obviously undesirable.  */
-      if (CONST_INT_P (size))
-        {
-          emit_stack_probe (stack_pointer_rtx);
-          emit_insn (gen_blockage ());
-        }
-      else
-        {
-          emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
-                                           -GET_MODE_SIZE (word_mode)));
-          emit_insn (gen_blockage ());
-        }
+
+      if (!CONST_INT_P (residual))
+        {
+          label = gen_label_rtx ();
+          rtx_code op = target_probe_range_p ? LT : EQ;
+          rtx probe_cmp_value = target_probe_range_p
+            ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
+            : CONST0_RTX (GET_MODE (residual));
+
+          if (target_probe_range_p)
+            emit_stack_probe (stack_pointer_rtx);
+
+          emit_cmp_and_jump_insns (residual, probe_cmp_value,
+                                   op, NULL_RTX, Pmode, 1, label);
+        }
+
+      rtx x = NULL_RTX;
+
+      /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
+         by the ABI defined safe value.  */
+      if (!CONST_INT_P (residual) && target_probe_range_p)
+        x = GEN_INT (probe_range);
+      /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
+         we still want to probe up, but the safest amount is a word.  */
+      else if (target_probe_range_p)
+        {
+          if (INTVAL (residual) <= probe_range)
+            x = GEN_INT (GET_MODE_SIZE (word_mode));
+          else
+            x = GEN_INT (probe_range);
+        }
+      else
+        /* If nothing else, probe at the top of the new allocation.  */
+        x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
+
+      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
+
+      emit_insn (gen_blockage ());
+      if (!CONST_INT_P (residual))
+        emit_label (label);
     }
 }
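To summarize the residual-probing logic introduced above: after anti_adjust_stack (residual), the probe lands at sp + X, where X depends on whether the residual is a compile-time constant and on the target's declared safe range. A standalone sketch of the offset selection; this is a model of the diff's control flow, not a GCC API, and a negative RESIDUAL here stands in for "unknown at compile time" (only meaningful in the target-range case):

#include <stdio.h>

#define WORD_SIZE 8  /* stands in for GET_MODE_SIZE (word_mode) */

/* Model of the new offset choice.  PROBE_RANGE == 0 means the target
   declared no safe range (the !TARGET_PROBE_RANGE_P case; the real
   code then folds in the default of probe_interval - word size).  */
static long probe_offset (long residual, long probe_range)
{
  int target_probe_range_p = probe_range != 0;

  if (residual < 0 && target_probe_range_p)
    return probe_range;                 /* unknown size: ABI-safe offset */
  else if (target_probe_range_p)
    /* Known size: a word above sp if the residual fits in the safe
       range, otherwise the safe range itself.  */
    return residual <= probe_range ? WORD_SIZE : probe_range;
  else
    return residual - WORD_SIZE;        /* top of the new allocation */
}

int main (void)
{
  printf ("%ld\n", probe_offset (4096, 0));    /* prints: 4088 */
  printf ("%ld\n", probe_offset (-1, 1024));   /* prints: 1024 */
  printf ("%ld\n", probe_offset (512, 1024));  /* prints: 8 */
  printf ("%ld\n", probe_offset (2048, 1024)); /* prints: 1024 */
  return 0;
}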
@@ -2020 +2074 @@
 
 
 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
@@ -2164 +2218 @@
   val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
 
   if (REG_P (val)
       && GET_MODE (val) == BLKmode)
     {
-      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
+      unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
       opt_scalar_int_mode tmpmode;
 
       /* int_size_in_bytes can return -1.  We don't need a check here
          since the value of bytes will then be large enough that no
          mode will match anyway.  */