comparison gcc/expr.c @ 131:84e7813d76e9
gcc-8.2
author: mir3636
date: Thu, 25 Oct 2018 07:37:49 +0900
parents: 04ced10e8804
children: 1830386684a0
111:04ced10e8804 | 131:84e7813d76e9
1 /* Convert tree expression to rtl instructions, for GNU compiler. | 1 /* Convert tree expression to rtl instructions, for GNU compiler. |
2 Copyright (C) 1988-2017 Free Software Foundation, Inc. | 2 Copyright (C) 1988-2018 Free Software Foundation, Inc. |
3 | 3 |
4 This file is part of GCC. | 4 This file is part of GCC. |
5 | 5 |
6 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
52 #include "optabs-tree.h" | 52 #include "optabs-tree.h" |
53 #include "libfuncs.h" | 53 #include "libfuncs.h" |
54 #include "reload.h" | 54 #include "reload.h" |
55 #include "langhooks.h" | 55 #include "langhooks.h" |
56 #include "common/common-target.h" | 56 #include "common/common-target.h" |
57 #include "tree-dfa.h" | |
57 #include "tree-ssa-live.h" | 58 #include "tree-ssa-live.h" |
58 #include "tree-outof-ssa.h" | 59 #include "tree-outof-ssa.h" |
59 #include "tree-ssa-address.h" | 60 #include "tree-ssa-address.h" |
60 #include "builtins.h" | 61 #include "builtins.h" |
61 #include "tree-chkp.h" | |
62 #include "rtl-chkp.h" | |
63 #include "ccmp.h" | 62 #include "ccmp.h" |
63 #include "gimple-fold.h" | |
64 #include "rtx-vector-builder.h" | |
64 | 65 |
65 | 66 |
66 /* If this is nonzero, we do not bother generating VOLATILE | 67 /* If this is nonzero, we do not bother generating VOLATILE |
67 around volatile memory references, and we are willing to | 68 around volatile memory references, and we are willing to |
68 output indirect addresses. If cse is to follow, we reject | 69 output indirect addresses. If cse is to follow, we reject |
77 unsigned HOST_WIDE_INT); | 78 unsigned HOST_WIDE_INT); |
78 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned); | 79 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned); |
79 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); | 80 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); |
80 static rtx_insn *compress_float_constant (rtx, rtx); | 81 static rtx_insn *compress_float_constant (rtx, rtx); |
81 static rtx get_subtarget (rtx); | 82 static rtx get_subtarget (rtx); |
82 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT, | 83 static void store_constructor (tree, rtx, int, poly_int64, bool); |
83 HOST_WIDE_INT, unsigned HOST_WIDE_INT, | 84 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64, |
84 unsigned HOST_WIDE_INT, machine_mode, | |
85 tree, int, alias_set_type, bool); | |
86 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool); | |
87 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, | |
88 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT, | |
89 machine_mode, tree, alias_set_type, bool, bool); | 85 machine_mode, tree, alias_set_type, bool, bool); |
90 | 86 |
91 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree); | 87 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree); |
92 | 88 |
93 static int is_aligning_offset (const_tree, const_tree); | 89 static int is_aligning_offset (const_tree, const_tree); |
235 && SUBREG_PROMOTED_VAR_P (from) | 231 && SUBREG_PROMOTED_VAR_P (from) |
236 && is_a <scalar_int_mode> (to_mode, &to_int_mode) | 232 && is_a <scalar_int_mode> (to_mode, &to_int_mode) |
237 && (GET_MODE_PRECISION (subreg_promoted_mode (from)) | 233 && (GET_MODE_PRECISION (subreg_promoted_mode (from)) |
238 >= GET_MODE_PRECISION (to_int_mode)) | 234 >= GET_MODE_PRECISION (to_int_mode)) |
239 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp)) | 235 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp)) |
240 from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode; | 236 { |
237 from = gen_lowpart (to_int_mode, SUBREG_REG (from)); | |
238 from_mode = to_int_mode; | |
239 } | |
241 | 240 |
242 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to)); | 241 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to)); |
243 | 242 |
244 if (to_mode == from_mode | 243 if (to_mode == from_mode |
245 || (from_mode == VOIDmode && CONSTANT_P (from))) | 244 || (from_mode == VOIDmode && CONSTANT_P (from))) |
248 return; | 247 return; |
249 } | 248 } |
250 | 249 |
251 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) | 250 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) |
252 { | 251 { |
253 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode)); | 252 gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode), |
253 GET_MODE_BITSIZE (to_mode))); | |
254 | 254 |
255 if (VECTOR_MODE_P (to_mode)) | 255 if (VECTOR_MODE_P (to_mode)) |
256 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); | 256 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); |
257 else | 257 else |
258 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); | 258 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); |
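
The known_eq call above is typical of the gcc 8 poly_int conversion that runs through this whole patch: with AArch64 SVE, a mode's size or bit size can be a degree-1 polynomial c + n*N in a runtime vector-length parameter N, so a plain == is no longer meaningful. Comparisons instead state whether a relation holds for every N (known_*) or for at least one N (maybe_*). Below is a minimal self-contained model of that semantics; it is illustrative only, the real implementation lives in gcc's poly-int.h.

    // Degree-1 model of a poly_int: value = c + n*N for a runtime N >= 0.
    #include <cassert>

    struct poly1 { long c, n; };

    // Relation holds for every possible N.
    bool known_eq (poly1 a, poly1 b) { return a.c == b.c && a.n == b.n; }
    bool known_le (poly1 a, poly1 b) { return a.c <= b.c && a.n <= b.n; }
    // Relation holds for at least one N.
    bool maybe_gt (poly1 a, poly1 b) { return a.c > b.c || a.n > b.n; }
    // One side bounds the other for all N (used by the asserts below).
    bool ordered_p (poly1 a, poly1 b)
    { return known_le (a, b) || known_le (b, a); }

    int main ()
    {
      poly1 two_n = { 0, 2 }, sixteen = { 16, 0 };
      assert (!known_eq (two_n, sixteen));  // differ whenever N != 8
      assert (maybe_gt (two_n, sixteen));   // 2*N > 16 once N > 8
      assert (!ordered_p (two_n, sixteen)); // neither side bounds the other
    }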
690 scalar_int_mode int_oldmode; | 690 scalar_int_mode int_oldmode; |
691 if (is_int_mode (mode, &int_mode) | 691 if (is_int_mode (mode, &int_mode) |
692 && is_int_mode (oldmode, &int_oldmode) | 692 && is_int_mode (oldmode, &int_oldmode) |
693 && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode) | 693 && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode) |
694 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode]) | 694 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode]) |
695 || CONST_POLY_INT_P (x) | |
695 || (REG_P (x) | 696 || (REG_P (x) |
696 && (!HARD_REGISTER_P (x) | 697 && (!HARD_REGISTER_P (x) |
697 || targetm.hard_regno_mode_ok (REGNO (x), int_mode)) | 698 || targetm.hard_regno_mode_ok (REGNO (x), int_mode)) |
698 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x))))) | 699 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x))))) |
699 return gen_lowpart (int_mode, x); | 700 return gen_lowpart (int_mode, x); |
700 | 701 |
701 /* Converting from integer constant into mode is always equivalent to a | 702 /* Converting from integer constant into mode is always equivalent to a |
702 subreg operation. */ | 703 subreg operation. */ |
703 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode) | 704 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode) |
704 { | 705 { |
705 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode)); | 706 gcc_assert (known_eq (GET_MODE_BITSIZE (mode), |
707 GET_MODE_BITSIZE (oldmode))); | |
706 return simplify_gen_subreg (mode, x, oldmode, 0); | 708 return simplify_gen_subreg (mode, x, oldmode, 0); |
707 } | 709 } |
708 | 710 |
709 temp = gen_reg_rtx (mode); | 711 temp = gen_reg_rtx (mode); |
710 convert_move (temp, x, unsignedp); | 712 convert_move (temp, x, unsignedp); |
1564 unsigned int expected_align, HOST_WIDE_INT expected_size, | 1566 unsigned int expected_align, HOST_WIDE_INT expected_size, |
1565 unsigned HOST_WIDE_INT min_size, | 1567 unsigned HOST_WIDE_INT min_size, |
1566 unsigned HOST_WIDE_INT max_size, | 1568 unsigned HOST_WIDE_INT max_size, |
1567 unsigned HOST_WIDE_INT probable_max_size) | 1569 unsigned HOST_WIDE_INT probable_max_size) |
1568 { | 1570 { |
1569 bool may_use_call; | 1571 int may_use_call; |
1570 rtx retval = 0; | 1572 rtx retval = 0; |
1571 unsigned int align; | 1573 unsigned int align; |
1572 | 1574 |
1573 gcc_assert (size); | 1575 gcc_assert (size); |
1574 if (CONST_INT_P (size) && INTVAL (size) == 0) | 1576 if (CONST_INT_P (size) && INTVAL (size) == 0) |
1576 | 1578 |
1577 switch (method) | 1579 switch (method) |
1578 { | 1580 { |
1579 case BLOCK_OP_NORMAL: | 1581 case BLOCK_OP_NORMAL: |
1580 case BLOCK_OP_TAILCALL: | 1582 case BLOCK_OP_TAILCALL: |
1581 may_use_call = true; | 1583 may_use_call = 1; |
1582 break; | 1584 break; |
1583 | 1585 |
1584 case BLOCK_OP_CALL_PARM: | 1586 case BLOCK_OP_CALL_PARM: |
1585 may_use_call = block_move_libcall_safe_for_call_parm (); | 1587 may_use_call = block_move_libcall_safe_for_call_parm (); |
1586 | 1588 |
1588 to force it to pop the arguments right away. */ | 1590 to force it to pop the arguments right away. */ |
1589 NO_DEFER_POP; | 1591 NO_DEFER_POP; |
1590 break; | 1592 break; |
1591 | 1593 |
1592 case BLOCK_OP_NO_LIBCALL: | 1594 case BLOCK_OP_NO_LIBCALL: |
1593 may_use_call = false; | 1595 may_use_call = 0; |
1596 break; | |
1597 | |
1598 case BLOCK_OP_NO_LIBCALL_RET: | |
1599 may_use_call = -1; | |
1594 break; | 1600 break; |
1595 | 1601 |
1596 default: | 1602 default: |
1597 gcc_unreachable (); | 1603 gcc_unreachable (); |
1598 } | 1604 } |
1606 x = adjust_address (x, BLKmode, 0); | 1612 x = adjust_address (x, BLKmode, 0); |
1607 y = adjust_address (y, BLKmode, 0); | 1613 y = adjust_address (y, BLKmode, 0); |
1608 | 1614 |
1609 /* Set MEM_SIZE as appropriate for this block copy. The main place this | 1615 /* Set MEM_SIZE as appropriate for this block copy. The main place this |
1610 can be incorrect is coming from __builtin_memcpy. */ | 1616 can be incorrect is coming from __builtin_memcpy. */ |
1611 if (CONST_INT_P (size)) | 1617 poly_int64 const_size; |
1618 if (poly_int_rtx_p (size, &const_size)) | |
1612 { | 1619 { |
1613 x = shallow_copy_rtx (x); | 1620 x = shallow_copy_rtx (x); |
1614 y = shallow_copy_rtx (y); | 1621 y = shallow_copy_rtx (y); |
1615 set_mem_size (x, INTVAL (size)); | 1622 set_mem_size (x, const_size); |
1616 set_mem_size (y, INTVAL (size)); | 1623 set_mem_size (y, const_size); |
1617 } | 1624 } |
1618 | 1625 |
1619 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align)) | 1626 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align)) |
1620 move_by_pieces (x, y, INTVAL (size), align, 0); | 1627 move_by_pieces (x, y, INTVAL (size), align, 0); |
1621 else if (emit_block_move_via_movmem (x, y, size, align, | 1628 else if (emit_block_move_via_movmem (x, y, size, align, |
1624 ; | 1631 ; |
1625 else if (may_use_call | 1632 else if (may_use_call |
1626 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)) | 1633 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)) |
1627 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y))) | 1634 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y))) |
1628 { | 1635 { |
1636 if (may_use_call < 0) | |
1637 return pc_rtx; | |
1638 | |
1629 /* Since x and y are passed to a libcall, mark the corresponding | 1639 /* Since x and y are passed to a libcall, mark the corresponding |
1630 tree EXPR as addressable. */ | 1640 tree EXPR as addressable. */ |
1631 tree y_expr = MEM_EXPR (y); | 1641 tree y_expr = MEM_EXPR (y); |
1632 tree x_expr = MEM_EXPR (x); | 1642 tree x_expr = MEM_EXPR (x); |
1633 if (y_expr) | 1643 if (y_expr) |
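
The hunks above also change may_use_call from bool to a tri-state int to support the new BLOCK_OP_NO_LIBCALL_RET method: positive means a memcpy libcall may be emitted, zero forbids it, and -1 means the function stops short of the libcall and returns the pc_rtx sentinel, apparently so the caller can emit the call itself and use its return value. A sketch of that protocol (illustrative names and structure, not GCC's code):

    #include <cassert>

    enum move_result { MOVED_INLINE, EMITTED_LIBCALL, NEEDS_LIBCALL /* pc_rtx */ };

    move_result emit_block_move_sketch (int may_use_call, bool inline_possible)
    {
      if (inline_possible)
        return MOVED_INLINE;      // by-pieces or a movmem pattern worked
      if (may_use_call < 0)
        return NEEDS_LIBCALL;     // BLOCK_OP_NO_LIBCALL_RET: caller emits it
      if (may_use_call > 0)
        return EMITTED_LIBCALL;   // BLOCK_OP_NORMAL / BLOCK_OP_TAILCALL
      return MOVED_INLINE;        // forced inline loop as a last resort
    }

    int main ()
    {
      assert (emit_block_move_sketch (-1, false) == NEEDS_LIBCALL);
      assert (emit_block_move_sketch (1, false) == EMITTED_LIBCALL);
    }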
2097 /* A subroutine of emit_group_load. Arguments as for emit_group_load, | 2107 /* A subroutine of emit_group_load. Arguments as for emit_group_load, |
2098 except that values are placed in TMPS[i], and must later be moved | 2108 except that values are placed in TMPS[i], and must later be moved |
2099 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */ | 2109 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */ |
2100 | 2110 |
2101 static void | 2111 static void |
2102 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize) | 2112 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, |
2113 poly_int64 ssize) | |
2103 { | 2114 { |
2104 rtx src; | 2115 rtx src; |
2105 int start, i; | 2116 int start, i; |
2106 machine_mode m = GET_MODE (orig_src); | 2117 machine_mode m = GET_MODE (orig_src); |
2107 | 2118 |
2136 | 2147 |
2137 /* Process the pieces. */ | 2148 /* Process the pieces. */ |
2138 for (i = start; i < XVECLEN (dst, 0); i++) | 2149 for (i = start; i < XVECLEN (dst, 0); i++) |
2139 { | 2150 { |
2140 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | 2151 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); |
2141 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); | 2152 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1)); |
2142 unsigned int bytelen = GET_MODE_SIZE (mode); | 2153 poly_int64 bytelen = GET_MODE_SIZE (mode); |
2143 int shift = 0; | 2154 poly_int64 shift = 0; |
2144 | 2155 |
2145 /* Handle trailing fragments that run over the size of the struct. */ | 2156 /* Handle trailing fragments that run over the size of the struct. |
2146 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) | 2157 It's the target's responsibility to make sure that the fragment |
2158 cannot be strictly smaller in some cases and strictly larger | |
2159 in others. */ | |
2160 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize)); | |
2161 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize)) | |
2147 { | 2162 { |
2148 /* Arrange to shift the fragment to where it belongs. | 2163 /* Arrange to shift the fragment to where it belongs. |
2149 extract_bit_field loads to the lsb of the reg. */ | 2164 extract_bit_field loads to the lsb of the reg. */ |
2150 if ( | 2165 if ( |
2151 #ifdef BLOCK_REG_PADDING | 2166 #ifdef BLOCK_REG_PADDING |
2155 BYTES_BIG_ENDIAN | 2170 BYTES_BIG_ENDIAN |
2156 #endif | 2171 #endif |
2157 ) | 2172 ) |
2158 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | 2173 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; |
2159 bytelen = ssize - bytepos; | 2174 bytelen = ssize - bytepos; |
2160 gcc_assert (bytelen > 0); | 2175 gcc_assert (maybe_gt (bytelen, 0)); |
2161 } | 2176 } |
2162 | 2177 |
2163 /* If we won't be loading directly from memory, protect the real source | 2178 /* If we won't be loading directly from memory, protect the real source |
2164 from strange tricks we might play; but make sure that the source can | 2179 from strange tricks we might play; but make sure that the source can |
2165 be loaded directly into the destination. */ | 2180 be loaded directly into the destination. */ |
2179 | 2194 |
2180 /* Optimize the access just a bit. */ | 2195 /* Optimize the access just a bit. */ |
2181 if (MEM_P (src) | 2196 if (MEM_P (src) |
2182 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src)) | 2197 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src)) |
2183 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)) | 2198 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)) |
2184 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 | 2199 && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode)) |
2185 && bytelen == GET_MODE_SIZE (mode)) | 2200 && known_eq (bytelen, GET_MODE_SIZE (mode))) |
2186 { | 2201 { |
2187 tmps[i] = gen_reg_rtx (mode); | 2202 tmps[i] = gen_reg_rtx (mode); |
2188 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); | 2203 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); |
2189 } | 2204 } |
2190 else if (COMPLEX_MODE_P (mode) | 2205 else if (COMPLEX_MODE_P (mode) |
2191 && GET_MODE (src) == mode | 2206 && GET_MODE (src) == mode |
2192 && bytelen == GET_MODE_SIZE (mode)) | 2207 && known_eq (bytelen, GET_MODE_SIZE (mode))) |
2193 /* Let emit_move_complex do the bulk of the work. */ | 2208 /* Let emit_move_complex do the bulk of the work. */ |
2194 tmps[i] = src; | 2209 tmps[i] = src; |
2195 else if (GET_CODE (src) == CONCAT) | 2210 else if (GET_CODE (src) == CONCAT) |
2196 { | 2211 { |
2197 unsigned int slen = GET_MODE_SIZE (GET_MODE (src)); | 2212 poly_int64 slen = GET_MODE_SIZE (GET_MODE (src)); |
2198 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0))); | 2213 poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0))); |
2199 unsigned int elt = bytepos / slen0; | 2214 unsigned int elt; |
2200 unsigned int subpos = bytepos % slen0; | 2215 poly_int64 subpos; |
2201 | 2216 |
2202 if (subpos + bytelen <= slen0) | 2217 if (can_div_trunc_p (bytepos, slen0, &elt, &subpos) |
2218 && known_le (subpos + bytelen, slen0)) | |
2203 { | 2219 { |
2204 /* The following assumes that the concatenated objects all | 2220 /* The following assumes that the concatenated objects all |
2205 have the same size. In this case, a simple calculation | 2221 have the same size. In this case, a simple calculation |
2206 can be used to determine the object and the bit field | 2222 can be used to determine the object and the bit field |
2207 to be extracted. */ | 2223 to be extracted. */ |
2208 tmps[i] = XEXP (src, elt); | 2224 tmps[i] = XEXP (src, elt); |
2209 if (subpos != 0 | 2225 if (maybe_ne (subpos, 0) |
2210 || subpos + bytelen != slen0 | 2226 || maybe_ne (subpos + bytelen, slen0) |
2211 || (!CONSTANT_P (tmps[i]) | 2227 || (!CONSTANT_P (tmps[i]) |
2212 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))) | 2228 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))) |
2213 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, | 2229 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, |
2214 subpos * BITS_PER_UNIT, | 2230 subpos * BITS_PER_UNIT, |
2215 1, NULL_RTX, mode, mode, false, | 2231 1, NULL_RTX, mode, mode, false, |
2217 } | 2233 } |
2218 else | 2234 else |
2219 { | 2235 { |
2220 rtx mem; | 2236 rtx mem; |
2221 | 2237 |
2222 gcc_assert (!bytepos); | 2238 gcc_assert (known_eq (bytepos, 0)); |
2223 mem = assign_stack_temp (GET_MODE (src), slen); | 2239 mem = assign_stack_temp (GET_MODE (src), slen); |
2224 emit_move_insn (mem, src); | 2240 emit_move_insn (mem, src); |
2225 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT, | 2241 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT, |
2226 0, 1, NULL_RTX, mode, mode, false, | 2242 0, 1, NULL_RTX, mode, mode, false, |
2227 NULL); | 2243 NULL); |
2231 SIMD register, which is currently broken. While we get GCC | 2247 SIMD register, which is currently broken. While we get GCC |
2232 to emit proper RTL for these cases, let's dump to memory. */ | 2248 to emit proper RTL for these cases, let's dump to memory. */ |
2233 else if (VECTOR_MODE_P (GET_MODE (dst)) | 2249 else if (VECTOR_MODE_P (GET_MODE (dst)) |
2234 && REG_P (src)) | 2250 && REG_P (src)) |
2235 { | 2251 { |
2236 int slen = GET_MODE_SIZE (GET_MODE (src)); | 2252 poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src)); |
2237 rtx mem; | 2253 rtx mem; |
2238 | 2254 |
2239 mem = assign_stack_temp (GET_MODE (src), slen); | 2255 mem = assign_stack_temp (GET_MODE (src), slen); |
2240 emit_move_insn (mem, src); | 2256 emit_move_insn (mem, src); |
2241 tmps[i] = adjust_address (mem, mode, (int) bytepos); | 2257 tmps[i] = adjust_address (mem, mode, bytepos); |
2242 } | 2258 } |
2243 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode | 2259 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode |
2244 && XVECLEN (dst, 0) > 1) | 2260 && XVECLEN (dst, 0) > 1) |
2245 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos); | 2261 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos); |
2246 else if (CONSTANT_P (src)) | 2262 else if (CONSTANT_P (src)) |
2247 { | 2263 { |
2248 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen; | 2264 if (known_eq (bytelen, ssize)) |
2249 | |
2250 if (len == ssize) | |
2251 tmps[i] = src; | 2265 tmps[i] = src; |
2252 else | 2266 else |
2253 { | 2267 { |
2254 rtx first, second; | 2268 rtx first, second; |
2255 | 2269 |
2256 /* TODO: const_wide_int can have sizes other than this... */ | 2270 /* TODO: const_wide_int can have sizes other than this... */ |
2257 gcc_assert (2 * len == ssize); | 2271 gcc_assert (known_eq (2 * bytelen, ssize)); |
2258 split_double (src, &first, &second); | 2272 split_double (src, &first, &second); |
2259 if (i) | 2273 if (i) |
2260 tmps[i] = second; | 2274 tmps[i] = second; |
2261 else | 2275 else |
2262 tmps[i] = first; | 2276 tmps[i] = first; |
2267 else | 2281 else |
2268 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, | 2282 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
2269 bytepos * BITS_PER_UNIT, 1, NULL_RTX, | 2283 bytepos * BITS_PER_UNIT, 1, NULL_RTX, |
2270 mode, mode, false, NULL); | 2284 mode, mode, false, NULL); |
2271 | 2285 |
2272 if (shift) | 2286 if (maybe_ne (shift, 0)) |
2273 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i], | 2287 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i], |
2274 shift, tmps[i], 0); | 2288 shift, tmps[i], 0); |
2275 } | 2289 } |
2276 } | 2290 } |
2277 | 2291 |
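
The trailing-fragment logic above survives the poly_int conversion unchanged in substance: when a piece's register mode extends past the end of the structure, only ssize - bytepos bytes are real, and on a big-endian (or BLOCK_REG_PADDING) target the loaded bits must be shifted up to the most significant end of the register. A worked example with made-up sizes:

    #include <cassert>

    int main ()
    {
      const long BITS_PER_UNIT = 8;
      long ssize = 6;        // total structure size in bytes
      long bytepos = 4;      // position of this piece
      long bytelen = 4;      // GET_MODE_SIZE of the register mode
      long shift = 0;

      if (bytepos + bytelen > ssize)   // maybe_gt in the poly_int world
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }
      // Only 2 bytes are real; shift the loaded bits up by 16.
      assert (shift == 16 && bytelen == 2);
    }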
2279 where DST is non-consecutive registers represented by a PARALLEL. | 2293 where DST is non-consecutive registers represented by a PARALLEL. |
2280 SSIZE represents the total size of block ORIG_SRC in bytes, or -1 | 2294 SSIZE represents the total size of block ORIG_SRC in bytes, or -1 |
2281 if not known. */ | 2295 if not known. */ |
2282 | 2296 |
2283 void | 2297 void |
2284 emit_group_load (rtx dst, rtx src, tree type, int ssize) | 2298 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize) |
2285 { | 2299 { |
2286 rtx *tmps; | 2300 rtx *tmps; |
2287 int i; | 2301 int i; |
2288 | 2302 |
2289 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0)); | 2303 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0)); |
2302 /* Similar, but load SRC into new pseudos in a format that looks like | 2316 /* Similar, but load SRC into new pseudos in a format that looks like |
2303 PARALLEL. This can later be fed to emit_group_move to get things | 2317 PARALLEL. This can later be fed to emit_group_move to get things |
2304 in the right place. */ | 2318 in the right place. */ |
2305 | 2319 |
2306 rtx | 2320 rtx |
2307 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize) | 2321 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize) |
2308 { | 2322 { |
2309 rtvec vec; | 2323 rtvec vec; |
2310 int i; | 2324 int i; |
2311 | 2325 |
2312 vec = rtvec_alloc (XVECLEN (parallel, 0)); | 2326 vec = rtvec_alloc (XVECLEN (parallel, 0)); |
2373 where SRC is non-consecutive registers represented by a PARALLEL. | 2387 where SRC is non-consecutive registers represented by a PARALLEL. |
2374 SSIZE represents the total size of block ORIG_DST, or -1 if not | 2388 SSIZE represents the total size of block ORIG_DST, or -1 if not |
2375 known. */ | 2389 known. */ |
2376 | 2390 |
2377 void | 2391 void |
2378 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) | 2392 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, |
2393 poly_int64 ssize) | |
2379 { | 2394 { |
2380 rtx *tmps, dst; | 2395 rtx *tmps, dst; |
2381 int start, finish, i; | 2396 int start, finish, i; |
2382 machine_mode m = GET_MODE (orig_dst); | 2397 machine_mode m = GET_MODE (orig_dst); |
2383 | 2398 |
2448 } | 2463 } |
2449 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) | 2464 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) |
2450 { | 2465 { |
2451 machine_mode outer = GET_MODE (dst); | 2466 machine_mode outer = GET_MODE (dst); |
2452 machine_mode inner; | 2467 machine_mode inner; |
2453 HOST_WIDE_INT bytepos; | 2468 poly_int64 bytepos; |
2454 bool done = false; | 2469 bool done = false; |
2455 rtx temp; | 2470 rtx temp; |
2456 | 2471 |
2457 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER) | 2472 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER) |
2458 dst = gen_reg_rtx (outer); | 2473 dst = gen_reg_rtx (outer); |
2463 initialize the destination. */ | 2478 initialize the destination. */ |
2464 if (start < finish) | 2479 if (start < finish) |
2465 { | 2480 { |
2466 inner = GET_MODE (tmps[start]); | 2481 inner = GET_MODE (tmps[start]); |
2467 bytepos = subreg_lowpart_offset (inner, outer); | 2482 bytepos = subreg_lowpart_offset (inner, outer); |
2468 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos) | 2483 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)), |
2484 bytepos)) | |
2469 { | 2485 { |
2470 temp = simplify_gen_subreg (outer, tmps[start], | 2486 temp = simplify_gen_subreg (outer, tmps[start], |
2471 inner, 0); | 2487 inner, 0); |
2472 if (temp) | 2488 if (temp) |
2473 { | 2489 { |
2482 if (!done | 2498 if (!done |
2483 && start < finish - 1) | 2499 && start < finish - 1) |
2484 { | 2500 { |
2485 inner = GET_MODE (tmps[finish - 1]); | 2501 inner = GET_MODE (tmps[finish - 1]); |
2486 bytepos = subreg_lowpart_offset (inner, outer); | 2502 bytepos = subreg_lowpart_offset (inner, outer); |
2487 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos) | 2503 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, |
2504 finish - 1), 1)), | |
2505 bytepos)) | |
2488 { | 2506 { |
2489 temp = simplify_gen_subreg (outer, tmps[finish - 1], | 2507 temp = simplify_gen_subreg (outer, tmps[finish - 1], |
2490 inner, 0); | 2508 inner, 0); |
2491 if (temp) | 2509 if (temp) |
2492 { | 2510 { |
2503 } | 2521 } |
2504 | 2522 |
2505 /* Process the pieces. */ | 2523 /* Process the pieces. */ |
2506 for (i = start; i < finish; i++) | 2524 for (i = start; i < finish; i++) |
2507 { | 2525 { |
2508 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); | 2526 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1)); |
2509 machine_mode mode = GET_MODE (tmps[i]); | 2527 machine_mode mode = GET_MODE (tmps[i]); |
2510 unsigned int bytelen = GET_MODE_SIZE (mode); | 2528 poly_int64 bytelen = GET_MODE_SIZE (mode); |
2511 unsigned int adj_bytelen; | 2529 poly_uint64 adj_bytelen; |
2512 rtx dest = dst; | 2530 rtx dest = dst; |
2513 | 2531 |
2514 /* Handle trailing fragments that run over the size of the struct. */ | 2532 /* Handle trailing fragments that run over the size of the struct. |
2515 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) | 2533 It's the target's responsibility to make sure that the fragment |
2534 cannot be strictly smaller in some cases and strictly larger | |
2535 in others. */ | |
2536 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize)); | |
2537 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize)) | |
2516 adj_bytelen = ssize - bytepos; | 2538 adj_bytelen = ssize - bytepos; |
2517 else | 2539 else |
2518 adj_bytelen = bytelen; | 2540 adj_bytelen = bytelen; |
2519 | 2541 |
2520 if (GET_CODE (dst) == CONCAT) | 2542 if (GET_CODE (dst) == CONCAT) |
2521 { | 2543 { |
2522 if (bytepos + adj_bytelen | 2544 if (known_le (bytepos + adj_bytelen, |
2523 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | 2545 GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))) |
2524 dest = XEXP (dst, 0); | 2546 dest = XEXP (dst, 0); |
2525 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | 2547 else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))) |
2526 { | 2548 { |
2527 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); | 2549 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); |
2528 dest = XEXP (dst, 1); | 2550 dest = XEXP (dst, 1); |
2529 } | 2551 } |
2530 else | 2552 else |
2531 { | 2553 { |
2532 machine_mode dest_mode = GET_MODE (dest); | 2554 machine_mode dest_mode = GET_MODE (dest); |
2533 machine_mode tmp_mode = GET_MODE (tmps[i]); | 2555 machine_mode tmp_mode = GET_MODE (tmps[i]); |
2534 | 2556 |
2535 gcc_assert (bytepos == 0 && XVECLEN (src, 0)); | 2557 gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0)); |
2536 | 2558 |
2537 if (GET_MODE_ALIGNMENT (dest_mode) | 2559 if (GET_MODE_ALIGNMENT (dest_mode) |
2538 >= GET_MODE_ALIGNMENT (tmp_mode)) | 2560 >= GET_MODE_ALIGNMENT (tmp_mode)) |
2539 { | 2561 { |
2540 dest = assign_stack_temp (dest_mode, | 2562 dest = assign_stack_temp (dest_mode, |
2555 break; | 2577 break; |
2556 } | 2578 } |
2557 } | 2579 } |
2558 | 2580 |
2559 /* Handle trailing fragments that run over the size of the struct. */ | 2581 /* Handle trailing fragments that run over the size of the struct. */ |
2560 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) | 2582 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize)) |
2561 { | 2583 { |
2562 /* store_bit_field always takes its value from the lsb. | 2584 /* store_bit_field always takes its value from the lsb. |
2563 Move the fragment to the lsb if it's not already there. */ | 2585 Move the fragment to the lsb if it's not already there. */ |
2564 if ( | 2586 if ( |
2565 #ifdef BLOCK_REG_PADDING | 2587 #ifdef BLOCK_REG_PADDING |
2568 #else | 2590 #else |
2569 BYTES_BIG_ENDIAN | 2591 BYTES_BIG_ENDIAN |
2570 #endif | 2592 #endif |
2571 ) | 2593 ) |
2572 { | 2594 { |
2573 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | 2595 poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; |
2574 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i], | 2596 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i], |
2575 shift, tmps[i], 0); | 2597 shift, tmps[i], 0); |
2576 } | 2598 } |
2577 | 2599 |
2578 /* Make sure not to write past the end of the struct. */ | 2600 /* Make sure not to write past the end of the struct. */ |
2584 | 2606 |
2585 /* Optimize the access just a bit. */ | 2607 /* Optimize the access just a bit. */ |
2586 else if (MEM_P (dest) | 2608 else if (MEM_P (dest) |
2587 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest)) | 2609 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest)) |
2588 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) | 2610 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) |
2589 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 | 2611 && multiple_p (bytepos * BITS_PER_UNIT, |
2590 && bytelen == GET_MODE_SIZE (mode)) | 2612 GET_MODE_ALIGNMENT (mode)) |
2613 && known_eq (bytelen, GET_MODE_SIZE (mode))) | |
2591 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); | 2614 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); |
2592 | 2615 |
2593 else | 2616 else |
2594 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, | 2617 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
2595 0, 0, mode, tmps[i], false); | 2618 0, 0, mode, tmps[i], false); |
2626 { | 2649 { |
2627 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); | 2650 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2628 rtx src = NULL, dst = NULL; | 2651 rtx src = NULL, dst = NULL; |
2629 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | 2652 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); |
2630 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0; | 2653 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0; |
2631 machine_mode mode = GET_MODE (srcreg); | 2654 /* No current ABI uses variable-sized modes to pass a BLKmode type. */ |
2632 machine_mode tmode = GET_MODE (target); | 2655 fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg)); |
2633 machine_mode copy_mode; | 2656 fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target)); |
2657 fixed_size_mode copy_mode; | |
2634 | 2658 |
2635 /* BLKmode registers created in the back-end shouldn't have survived. */ | 2659 /* BLKmode registers created in the back-end shouldn't have survived. */ |
2636 gcc_assert (mode != BLKmode); | 2660 gcc_assert (mode != BLKmode); |
2637 | 2661 |
2638 /* If the structure doesn't take up a whole number of words, see whether | 2662 /* If the structure doesn't take up a whole number of words, see whether |
2726 false, NULL), | 2750 false, NULL), |
2727 false); | 2751 false); |
2728 } | 2752 } |
2729 } | 2753 } |
2730 | 2754 |
2731 /* Copy BLKmode value SRC into a register of mode MODE. Return the | 2755 /* Copy BLKmode value SRC into a register of mode MODE_IN. Return the |
2732 register if it contains any data, otherwise return null. | 2756 register if it contains any data, otherwise return null. |
2733 | 2757 |
2734 This is used on targets that return BLKmode values in registers. */ | 2758 This is used on targets that return BLKmode values in registers. */ |
2735 | 2759 |
2736 rtx | 2760 rtx |
2737 copy_blkmode_to_reg (machine_mode mode, tree src) | 2761 copy_blkmode_to_reg (machine_mode mode_in, tree src) |
2738 { | 2762 { |
2739 int i, n_regs; | 2763 int i, n_regs; |
2740 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes; | 2764 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes; |
2741 unsigned int bitsize; | 2765 unsigned int bitsize; |
2742 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX; | 2766 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX; |
2743 machine_mode dst_mode; | 2767 /* No current ABI uses variable-sized modes to pass a BLKmode type. */ |
2768 fixed_size_mode mode = as_a <fixed_size_mode> (mode_in); | |
2769 fixed_size_mode dst_mode; | |
2770 scalar_int_mode min_mode; | |
2744 | 2771 |
2745 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode); | 2772 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode); |
2746 | 2773 |
2747 x = expand_normal (src); | 2774 x = expand_normal (src); |
2748 | 2775 |
2749 bytes = int_size_in_bytes (TREE_TYPE (src)); | 2776 bytes = arg_int_size_in_bytes (TREE_TYPE (src)); |
2750 if (bytes == 0) | 2777 if (bytes == 0) |
2751 return NULL_RTX; | 2778 return NULL_RTX; |
2752 | 2779 |
2753 /* If the structure doesn't take up a whole number of words, see | 2780 /* If the structure doesn't take up a whole number of words, see |
2754 whether the register value should be padded on the left or on | 2781 whether the register value should be padded on the left or on |
2768 * BITS_PER_UNIT)); | 2795 * BITS_PER_UNIT)); |
2769 | 2796 |
2770 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; | 2797 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; |
2771 dst_words = XALLOCAVEC (rtx, n_regs); | 2798 dst_words = XALLOCAVEC (rtx, n_regs); |
2772 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD); | 2799 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD); |
2800 min_mode = smallest_int_mode_for_size (bitsize); | |
2773 | 2801 |
2774 /* Copy the structure BITSIZE bits at a time. */ | 2802 /* Copy the structure BITSIZE bits at a time. */ |
2775 for (bitpos = 0, xbitpos = padding_correction; | 2803 for (bitpos = 0, xbitpos = padding_correction; |
2776 bitpos < bytes * BITS_PER_UNIT; | 2804 bitpos < bytes * BITS_PER_UNIT; |
2777 bitpos += bitsize, xbitpos += bitsize) | 2805 bitpos += bitsize, xbitpos += bitsize) |
2786 dst_word = gen_reg_rtx (word_mode); | 2814 dst_word = gen_reg_rtx (word_mode); |
2787 dst_words[xbitpos / BITS_PER_WORD] = dst_word; | 2815 dst_words[xbitpos / BITS_PER_WORD] = dst_word; |
2788 | 2816 |
2789 /* Clear the destination before we move anything into it. */ | 2817 /* Clear the destination before we move anything into it. */ |
2790 emit_move_insn (dst_word, CONST0_RTX (word_mode)); | 2818 emit_move_insn (dst_word, CONST0_RTX (word_mode)); |
2819 } | |
2820 | |
2821 /* Find the largest integer mode that can be used to copy all or as | |
2822 many bits as possible of the structure if the target supports larger | |
2823 copies. There are too many corner cases here w.r.t. alignments on |
2824 the read/writes. So if there is any padding just use single byte | |
2825 operations. */ | |
2826 opt_scalar_int_mode mode_iter; | |
2827 if (padding_correction == 0 && !STRICT_ALIGNMENT) | |
2828 { | |
2829 FOR_EACH_MODE_FROM (mode_iter, min_mode) | |
2830 { | |
2831 unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ()); | |
2832 if (msize <= ((bytes * BITS_PER_UNIT) - bitpos) | |
2833 && msize <= BITS_PER_WORD) | |
2834 bitsize = msize; | |
2835 else | |
2836 break; | |
2837 } | |
2791 } | 2838 } |
2792 | 2839 |
2793 /* We need a new source operand each time bitpos is on a word | 2840 /* We need a new source operand each time bitpos is on a word |
2794 boundary. */ | 2841 boundary. */ |
2795 if (bitpos % BITS_PER_WORD == 0) | 2842 if (bitpos % BITS_PER_WORD == 0) |
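
The block inserted at new lines 2821-2837 widens the copy chunk on each iteration: starting from the smallest integer mode that holds BITSIZE bits, it keeps the widest mode that still fits in both the remaining bits of the structure and a word, but only when there is no padding correction and !STRICT_ALIGNMENT. A self-contained sketch of that selection, with plain bit counts standing in for GCC's machine modes:

    #include <cassert>

    // Pick the widest "mode" (power-of-two bit count) that fits in both the
    // remaining bits of the structure and a machine word.
    unsigned copy_chunk_bits (unsigned bitsize, unsigned total_bits,
                              unsigned bits_done, unsigned bits_per_word)
    {
      unsigned best = bitsize;
      for (unsigned msize = bitsize;
           msize <= bits_per_word && msize <= total_bits - bits_done;
           msize *= 2)
        best = msize;
      return best;
    }

    int main ()
    {
      // A 12-byte structure copied byte-wise on a 64-bit-word target: the
      // first chunk widens to 64 bits, the 4-byte tail to 32 bits.
      assert (copy_chunk_bits (8, 96, 0, 64) == 64);
      assert (copy_chunk_bits (8, 96, 64, 64) == 32);
    }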
2951 | 2998 |
2952 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL); | 2999 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL); |
2953 | 3000 |
2954 /* If OBJECT is not BLKmode and SIZE is the same size as its mode, | 3001 /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2955 just move a zero. Otherwise, do this a piece at a time. */ | 3002 just move a zero. Otherwise, do this a piece at a time. */ |
3003 poly_int64 size_val; | |
2956 if (mode != BLKmode | 3004 if (mode != BLKmode |
2957 && CONST_INT_P (size) | 3005 && poly_int_rtx_p (size, &size_val) |
2958 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode)) | 3006 && known_eq (size_val, GET_MODE_SIZE (mode))) |
2959 { | 3007 { |
2960 rtx zero = CONST0_RTX (mode); | 3008 rtx zero = CONST0_RTX (mode); |
2961 if (zero != NULL) | 3009 if (zero != NULL) |
2962 { | 3010 { |
2963 emit_move_insn (object, zero); | 3011 emit_move_insn (object, zero); |
3332 | 3380 |
3333 rtx | 3381 rtx |
3334 emit_move_resolve_push (machine_mode mode, rtx x) | 3382 emit_move_resolve_push (machine_mode mode, rtx x) |
3335 { | 3383 { |
3336 enum rtx_code code = GET_CODE (XEXP (x, 0)); | 3384 enum rtx_code code = GET_CODE (XEXP (x, 0)); |
3337 HOST_WIDE_INT adjust; | |
3338 rtx temp; | 3385 rtx temp; |
3339 | 3386 |
3340 adjust = GET_MODE_SIZE (mode); | 3387 poly_int64 adjust = GET_MODE_SIZE (mode); |
3341 #ifdef PUSH_ROUNDING | 3388 #ifdef PUSH_ROUNDING |
3342 adjust = PUSH_ROUNDING (adjust); | 3389 adjust = PUSH_ROUNDING (adjust); |
3343 #endif | 3390 #endif |
3344 if (code == PRE_DEC || code == POST_DEC) | 3391 if (code == PRE_DEC || code == POST_DEC) |
3345 adjust = -adjust; | 3392 adjust = -adjust; |
3346 else if (code == PRE_MODIFY || code == POST_MODIFY) | 3393 else if (code == PRE_MODIFY || code == POST_MODIFY) |
3347 { | 3394 { |
3348 rtx expr = XEXP (XEXP (x, 0), 1); | 3395 rtx expr = XEXP (XEXP (x, 0), 1); |
3349 HOST_WIDE_INT val; | |
3350 | 3396 |
3351 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS); | 3397 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS); |
3352 gcc_assert (CONST_INT_P (XEXP (expr, 1))); | 3398 poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1)); |
3353 val = INTVAL (XEXP (expr, 1)); | |
3354 if (GET_CODE (expr) == MINUS) | 3399 if (GET_CODE (expr) == MINUS) |
3355 val = -val; | 3400 val = -val; |
3356 gcc_assert (adjust == val || adjust == -val); | 3401 gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val)); |
3357 adjust = val; | 3402 adjust = val; |
3358 } | 3403 } |
3359 | 3404 |
3360 /* Do not use anti_adjust_stack, since we don't want to update | 3405 /* Do not use anti_adjust_stack, since we don't want to update |
3361 stack_pointer_delta. */ | 3406 stack_pointer_delta. */ |
3393 { | 3438 { |
3394 scalar_mode submode = GET_MODE_INNER (mode); | 3439 scalar_mode submode = GET_MODE_INNER (mode); |
3395 bool imag_first; | 3440 bool imag_first; |
3396 | 3441 |
3397 #ifdef PUSH_ROUNDING | 3442 #ifdef PUSH_ROUNDING |
3398 unsigned int submodesize = GET_MODE_SIZE (submode); | 3443 poly_int64 submodesize = GET_MODE_SIZE (submode); |
3399 | 3444 |
3400 /* In case we output to the stack, but the size is smaller than the | 3445 /* In case we output to the stack, but the size is smaller than the |
3401 machine can push exactly, we need to use move instructions. */ | 3446 machine can push exactly, we need to use move instructions. */ |
3402 if (PUSH_ROUNDING (submodesize) != submodesize) | 3447 if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize)) |
3403 { | 3448 { |
3404 x = emit_move_resolve_push (mode, x); | 3449 x = emit_move_resolve_push (mode, x); |
3405 return emit_move_insn (x, y); | 3450 return emit_move_insn (x, y); |
3406 } | 3451 } |
3407 #endif | 3452 #endif |
3494 | 3539 |
3495 /* For memory to memory moves, optimal behavior can be had with the | 3540 /* For memory to memory moves, optimal behavior can be had with the |
3496 existing block move logic. */ | 3541 existing block move logic. */ |
3497 if (MEM_P (x) && MEM_P (y)) | 3542 if (MEM_P (x) && MEM_P (y)) |
3498 { | 3543 { |
3499 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)), | 3544 emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode), |
3500 BLOCK_OP_NO_LIBCALL); | 3545 BLOCK_OP_NO_LIBCALL); |
3501 return get_last_insn (); | 3546 return get_last_insn (); |
3502 } | 3547 } |
3503 | 3548 |
3504 ret = emit_move_via_integer (mode, x, y, true); | 3549 ret = emit_move_via_integer (mode, x, y, true); |
3542 undefined_operand_subword_p (const_rtx op, int i) | 3587 undefined_operand_subword_p (const_rtx op, int i) |
3543 { | 3588 { |
3544 if (GET_CODE (op) != SUBREG) | 3589 if (GET_CODE (op) != SUBREG) |
3545 return false; | 3590 return false; |
3546 machine_mode innermostmode = GET_MODE (SUBREG_REG (op)); | 3591 machine_mode innermostmode = GET_MODE (SUBREG_REG (op)); |
3547 HOST_WIDE_INT offset = i * UNITS_PER_WORD + subreg_memory_offset (op); | 3592 poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op); |
3548 return (offset >= GET_MODE_SIZE (innermostmode) | 3593 return (known_ge (offset, GET_MODE_SIZE (innermostmode)) |
3549 || offset <= -UNITS_PER_WORD); | 3594 || known_le (offset, -UNITS_PER_WORD)); |
3550 } | 3595 } |
3551 | 3596 |
3552 /* A subroutine of emit_move_insn_1. Generate a move from Y into X. | 3597 /* A subroutine of emit_move_insn_1. Generate a move from Y into X. |
3553 MODE is any multi-word or full-word mode that lacks a move_insn | 3598 MODE is any multi-word or full-word mode that lacks a move_insn |
3554 pattern. Note that you will get better code if you define such | 3599 pattern. Note that you will get better code if you define such |
3559 { | 3604 { |
3560 rtx_insn *last_insn = 0; | 3605 rtx_insn *last_insn = 0; |
3561 rtx_insn *seq; | 3606 rtx_insn *seq; |
3562 rtx inner; | 3607 rtx inner; |
3563 bool need_clobber; | 3608 bool need_clobber; |
3564 int i; | 3609 int i, mode_size; |
3565 | 3610 |
3566 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD); | 3611 /* This function can only handle cases where the number of words is |
3612 known at compile time. */ | |
3613 mode_size = GET_MODE_SIZE (mode).to_constant (); | |
3614 gcc_assert (mode_size >= UNITS_PER_WORD); | |
3567 | 3615 |
3568 /* If X is a push on the stack, do the push now and replace | 3616 /* If X is a push on the stack, do the push now and replace |
3569 X with a reference to the stack pointer. */ | 3617 X with a reference to the stack pointer. */ |
3570 if (push_operand (x, mode)) | 3618 if (push_operand (x, mode)) |
3571 x = emit_move_resolve_push (mode, x); | 3619 x = emit_move_resolve_push (mode, x); |
3580 y = replace_equiv_address_nv (y, inner); | 3628 y = replace_equiv_address_nv (y, inner); |
3581 | 3629 |
3582 start_sequence (); | 3630 start_sequence (); |
3583 | 3631 |
3584 need_clobber = false; | 3632 need_clobber = false; |
3585 for (i = 0; | 3633 for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++) |
3586 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; | |
3587 i++) | |
3588 { | 3634 { |
3589 rtx xpart = operand_subword (x, i, 1, mode); | 3635 rtx xpart = operand_subword (x, i, 1, mode); |
3590 rtx ypart; | 3636 rtx ypart; |
3591 | 3637 |
3592 /* Do not generate code for a move if it would come entirely | 3638 /* Do not generate code for a move if it would come entirely |
3668 | 3714 |
3669 /* Try using a move pattern for the corresponding integer mode. This is | 3715 /* Try using a move pattern for the corresponding integer mode. This is |
3670 only safe when simplify_subreg can convert MODE constants into integer | 3716 only safe when simplify_subreg can convert MODE constants into integer |
3671 constants. At present, it can only do this reliably if the value | 3717 constants. At present, it can only do this reliably if the value |
3672 fits within a HOST_WIDE_INT. */ | 3718 fits within a HOST_WIDE_INT. */ |
3673 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | 3719 if (!CONSTANT_P (y) |
3720 || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT)) | |
3674 { | 3721 { |
3675 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress); | 3722 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress); |
3676 | 3723 |
3677 if (ret) | 3724 if (ret) |
3678 { | 3725 { |
3863 EXTRA is the number of bytes of padding to push in addition to SIZE. | 3910 EXTRA is the number of bytes of padding to push in addition to SIZE. |
3864 BELOW nonzero means this padding comes at low addresses; | 3911 BELOW nonzero means this padding comes at low addresses; |
3865 otherwise, the padding comes at high addresses. */ | 3912 otherwise, the padding comes at high addresses. */ |
3866 | 3913 |
3867 rtx | 3914 rtx |
3868 push_block (rtx size, int extra, int below) | 3915 push_block (rtx size, poly_int64 extra, int below) |
3869 { | 3916 { |
3870 rtx temp; | 3917 rtx temp; |
3871 | 3918 |
3872 size = convert_modes (Pmode, ptr_mode, size, 1); | 3919 size = convert_modes (Pmode, ptr_mode, size, 1); |
3873 if (CONSTANT_P (size)) | 3920 if (CONSTANT_P (size)) |
3874 anti_adjust_stack (plus_constant (Pmode, size, extra)); | 3921 anti_adjust_stack (plus_constant (Pmode, size, extra)); |
3875 else if (REG_P (size) && extra == 0) | 3922 else if (REG_P (size) && known_eq (extra, 0)) |
3876 anti_adjust_stack (size); | 3923 anti_adjust_stack (size); |
3877 else | 3924 else |
3878 { | 3925 { |
3879 temp = copy_to_mode_reg (Pmode, size); | 3926 temp = copy_to_mode_reg (Pmode, size); |
3880 if (extra != 0) | 3927 if (maybe_ne (extra, 0)) |
3881 temp = expand_binop (Pmode, add_optab, temp, | 3928 temp = expand_binop (Pmode, add_optab, temp, |
3882 gen_int_mode (extra, Pmode), | 3929 gen_int_mode (extra, Pmode), |
3883 temp, 0, OPTAB_LIB_WIDEN); | 3930 temp, 0, OPTAB_LIB_WIDEN); |
3884 anti_adjust_stack (temp); | 3931 anti_adjust_stack (temp); |
3885 } | 3932 } |
3886 | 3933 |
3887 if (STACK_GROWS_DOWNWARD) | 3934 if (STACK_GROWS_DOWNWARD) |
3888 { | 3935 { |
3889 temp = virtual_outgoing_args_rtx; | 3936 temp = virtual_outgoing_args_rtx; |
3890 if (extra != 0 && below) | 3937 if (maybe_ne (extra, 0) && below) |
3891 temp = plus_constant (Pmode, temp, extra); | 3938 temp = plus_constant (Pmode, temp, extra); |
3892 } | 3939 } |
3893 else | 3940 else |
3894 { | 3941 { |
3895 if (CONST_INT_P (size)) | 3942 poly_int64 csize; |
3943 if (poly_int_rtx_p (size, &csize)) | |
3896 temp = plus_constant (Pmode, virtual_outgoing_args_rtx, | 3944 temp = plus_constant (Pmode, virtual_outgoing_args_rtx, |
3897 -INTVAL (size) - (below ? 0 : extra)); | 3945 -csize - (below ? 0 : extra)); |
3898 else if (extra != 0 && !below) | 3946 else if (maybe_ne (extra, 0) && !below) |
3899 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | 3947 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, |
3900 negate_rtx (Pmode, plus_constant (Pmode, size, | 3948 negate_rtx (Pmode, plus_constant (Pmode, size, |
3901 extra))); | 3949 extra))); |
3902 else | 3950 else |
3903 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | 3951 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, |
3937 Insns in the sequence that do not modify the SP are ignored, | 3985 Insns in the sequence that do not modify the SP are ignored, |
3938 except for noreturn calls. | 3986 except for noreturn calls. |
3939 | 3987 |
3940 The return value is the amount of adjustment that can be trivially | 3988 The return value is the amount of adjustment that can be trivially |
3941 verified, via immediate operand or auto-inc. If the adjustment | 3989 verified, via immediate operand or auto-inc. If the adjustment |
3942 cannot be trivially extracted, the return value is INT_MIN. */ | 3990 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */ |
3943 | 3991 |
3944 HOST_WIDE_INT | 3992 poly_int64 |
3945 find_args_size_adjust (rtx_insn *insn) | 3993 find_args_size_adjust (rtx_insn *insn) |
3946 { | 3994 { |
3947 rtx dest, set, pat; | 3995 rtx dest, set, pat; |
3948 int i; | 3996 int i; |
3949 | 3997 |
4012 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM) | 4060 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM) |
4013 { | 4061 { |
4014 /* Look for a trivial adjustment, otherwise assume nothing. */ | 4062 /* Look for a trivial adjustment, otherwise assume nothing. */ |
4015 /* Note that the SPU restore_stack_block pattern refers to | 4063 /* Note that the SPU restore_stack_block pattern refers to |
4016 the stack pointer in V4SImode. Consider that non-trivial. */ | 4064 the stack pointer in V4SImode. Consider that non-trivial. */ |
4065 poly_int64 offset; | |
4017 if (SCALAR_INT_MODE_P (GET_MODE (dest)) | 4066 if (SCALAR_INT_MODE_P (GET_MODE (dest)) |
4018 && GET_CODE (SET_SRC (set)) == PLUS | 4067 && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx) |
4019 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx | 4068 return offset; |
4020 && CONST_INT_P (XEXP (SET_SRC (set), 1))) | |
4021 return INTVAL (XEXP (SET_SRC (set), 1)); | |
4022 /* ??? Reload can generate no-op moves, which will be cleaned | 4069 /* ??? Reload can generate no-op moves, which will be cleaned |
4023 up later. Recognize it and continue searching. */ | 4070 up later. Recognize it and continue searching. */ |
4024 else if (rtx_equal_p (dest, SET_SRC (set))) | 4071 else if (rtx_equal_p (dest, SET_SRC (set))) |
4025 return 0; | 4072 return 0; |
4026 else | 4073 else |
4054 case PRE_MODIFY: | 4101 case PRE_MODIFY: |
4055 case POST_MODIFY: | 4102 case POST_MODIFY: |
4056 addr = XEXP (addr, 1); | 4103 addr = XEXP (addr, 1); |
4057 gcc_assert (GET_CODE (addr) == PLUS); | 4104 gcc_assert (GET_CODE (addr) == PLUS); |
4058 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx); | 4105 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx); |
4059 gcc_assert (CONST_INT_P (XEXP (addr, 1))); | 4106 return rtx_to_poly_int64 (XEXP (addr, 1)); |
4060 return INTVAL (XEXP (addr, 1)); | |
4061 default: | 4107 default: |
4062 gcc_unreachable (); | 4108 gcc_unreachable (); |
4063 } | 4109 } |
4064 } | 4110 } |
4065 } | 4111 } |
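
find_args_size_adjust now returns poly_int64, and the explicit PLUS/CONST_INT pattern match is replaced by strip_offset, which splits an rtx into a base and a (possibly polynomial) constant offset, so (set sp (plus sp (const_int N))) and a bare sp both reduce to one comparison against stack_pointer_rtx. A minimal model of that idiom (illustrative; GCC's real rtx representation differs):

    #include <cassert>

    // A leaf points at itself; a (plus base const) node points at its base.
    struct expr { const expr *base; long off; };

    const expr *strip_offset (const expr *e, long *off)
    {
      if (e->base != e)          // a (plus base (const_int off)) node
        { *off = e->off; return e->base; }
      *off = 0;                  // a bare register
      return e;
    }

    int main ()
    {
      expr sp = { &sp, 0 };             // the stack pointer "rtx"
      expr sp_plus = { &sp, -16 };      // (plus sp (const_int -16))
      long off;
      assert (strip_offset (&sp_plus, &off) == &sp && off == -16);
      assert (strip_offset (&sp, &off) == &sp && off == 0);
    }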
4066 | 4112 |
4067 int | 4113 poly_int64 |
4068 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size) | 4114 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, |
4069 { | 4115 poly_int64 end_args_size) |
4070 int args_size = end_args_size; | 4116 { |
4117 poly_int64 args_size = end_args_size; | |
4071 bool saw_unknown = false; | 4118 bool saw_unknown = false; |
4072 rtx_insn *insn; | 4119 rtx_insn *insn; |
4073 | 4120 |
4074 for (insn = last; insn != prev; insn = PREV_INSN (insn)) | 4121 for (insn = last; insn != prev; insn = PREV_INSN (insn)) |
4075 { | 4122 { |
4076 HOST_WIDE_INT this_delta; | |
4077 | |
4078 if (!NONDEBUG_INSN_P (insn)) | 4123 if (!NONDEBUG_INSN_P (insn)) |
4079 continue; | 4124 continue; |
4080 | 4125 |
4081 this_delta = find_args_size_adjust (insn); | 4126 /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing |
4082 if (this_delta == 0) | 4127 a call argument containing a TLS address that itself requires |
4128 a call to __tls_get_addr. The handling of stack_pointer_delta | |
4129 in emit_single_push_insn is supposed to ensure that any such | |
4130 notes are already correct. */ | |
4131 rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX); | |
4132 gcc_assert (!note || known_eq (args_size, get_args_size (note))); | |
4133 | |
4134 poly_int64 this_delta = find_args_size_adjust (insn); | |
4135 if (known_eq (this_delta, 0)) | |
4083 { | 4136 { |
4084 if (!CALL_P (insn) | 4137 if (!CALL_P (insn) |
4085 || ACCUMULATE_OUTGOING_ARGS | 4138 || ACCUMULATE_OUTGOING_ARGS |
4086 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX) | 4139 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX) |
4087 continue; | 4140 continue; |
4088 } | 4141 } |
4089 | 4142 |
4090 gcc_assert (!saw_unknown); | 4143 gcc_assert (!saw_unknown); |
4091 if (this_delta == HOST_WIDE_INT_MIN) | 4144 if (known_eq (this_delta, HOST_WIDE_INT_MIN)) |
4092 saw_unknown = true; | 4145 saw_unknown = true; |
4093 | 4146 |
4094 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size)); | 4147 if (!note) |
4148 add_args_size_note (insn, args_size); | |
4095 if (STACK_GROWS_DOWNWARD) | 4149 if (STACK_GROWS_DOWNWARD) |
4096 this_delta = -(unsigned HOST_WIDE_INT) this_delta; | 4150 this_delta = -poly_uint64 (this_delta); |
4097 | 4151 |
4098 args_size -= this_delta; | 4152 if (saw_unknown) |
4099 } | 4153 args_size = HOST_WIDE_INT_MIN; |
4100 | 4154 else |
4101 return saw_unknown ? INT_MIN : args_size; | 4155 args_size -= this_delta; |
4156 } | |
4157 | |
4158 return args_size; | |
4102 } | 4159 } |
4103 | 4160 |
4104 #ifdef PUSH_ROUNDING | 4161 #ifdef PUSH_ROUNDING |
4105 /* Emit single push insn. */ | 4162 /* Emit single push insn. */ |
4106 | 4163 |
4107 static void | 4164 static void |
4108 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type) | 4165 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type) |
4109 { | 4166 { |
4110 rtx dest_addr; | 4167 rtx dest_addr; |
4111 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); | 4168 poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
4112 rtx dest; | 4169 rtx dest; |
4113 enum insn_code icode; | 4170 enum insn_code icode; |
4114 | 4171 |
4115 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); | |
4116 /* If there is push pattern, use it. Otherwise try old way of throwing | 4172 /* If there is push pattern, use it. Otherwise try old way of throwing |
4117 MEM representing push operation to move expander. */ | 4173 MEM representing push operation to move expander. */ |
4118 icode = optab_handler (push_optab, mode); | 4174 icode = optab_handler (push_optab, mode); |
4119 if (icode != CODE_FOR_nothing) | 4175 if (icode != CODE_FOR_nothing) |
4120 { | 4176 { |
4122 | 4178 |
4123 create_input_operand (&ops[0], x, mode); | 4179 create_input_operand (&ops[0], x, mode); |
4124 if (maybe_expand_insn (icode, 1, ops)) | 4180 if (maybe_expand_insn (icode, 1, ops)) |
4125 return; | 4181 return; |
4126 } | 4182 } |
4127 if (GET_MODE_SIZE (mode) == rounded_size) | 4183 if (known_eq (GET_MODE_SIZE (mode), rounded_size)) |
4128 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | 4184 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); |
4129 /* If we are to pad downward, adjust the stack pointer first and | 4185 /* If we are to pad downward, adjust the stack pointer first and |
4130 then store X into the stack location using an offset. This is | 4186 then store X into the stack location using an offset. This is |
4131 because emit_move_insn does not know how to pad; it does not have | 4187 because emit_move_insn does not know how to pad; it does not have |
4132 access to type. */ | 4188 access to type. */ |
4133 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD) | 4189 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD) |
4134 { | 4190 { |
4135 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode); | |
4136 HOST_WIDE_INT offset; | |
4137 | |
4138 emit_move_insn (stack_pointer_rtx, | 4191 emit_move_insn (stack_pointer_rtx, |
4139 expand_binop (Pmode, | 4192 expand_binop (Pmode, |
4140 STACK_GROWS_DOWNWARD ? sub_optab | 4193 STACK_GROWS_DOWNWARD ? sub_optab |
4141 : add_optab, | 4194 : add_optab, |
4142 stack_pointer_rtx, | 4195 stack_pointer_rtx, |
4143 gen_int_mode (rounded_size, Pmode), | 4196 gen_int_mode (rounded_size, Pmode), |
4144 NULL_RTX, 0, OPTAB_LIB_WIDEN)); | 4197 NULL_RTX, 0, OPTAB_LIB_WIDEN)); |
4145 | 4198 |
4146 offset = (HOST_WIDE_INT) padding_size; | 4199 poly_int64 offset = rounded_size - GET_MODE_SIZE (mode); |
4147 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC) | 4200 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC) |
4148 /* We have already decremented the stack pointer, so get the | 4201 /* We have already decremented the stack pointer, so get the |
4149 previous value. */ | 4202 previous value. */ |
4150 offset += (HOST_WIDE_INT) rounded_size; | 4203 offset += rounded_size; |
4151 | 4204 |
4152 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC) | 4205 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC) |
4153 /* We have already incremented the stack pointer, so get the | 4206 /* We have already incremented the stack pointer, so get the |
4154 previous value. */ | 4207 previous value. */ |
4155 offset -= (HOST_WIDE_INT) rounded_size; | 4208 offset -= rounded_size; |
4156 | 4209 |
4157 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | 4210 dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset); |
4158 gen_int_mode (offset, Pmode)); | |
4159 } | 4211 } |
4160 else | 4212 else |
4161 { | 4213 { |
4162 if (STACK_GROWS_DOWNWARD) | 4214 if (STACK_GROWS_DOWNWARD) |
4163 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ | 4215 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ |
4164 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | 4216 dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size); |
4165 gen_int_mode (-(HOST_WIDE_INT) rounded_size, | |
4166 Pmode)); | |
4167 else | 4217 else |
4168 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ | 4218 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ |
4169 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | 4219 dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size); |
4170 gen_int_mode (rounded_size, Pmode)); | |
4171 | 4220 |
4172 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | 4221 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); |
4173 } | 4222 } |
4174 | 4223 |
4175 dest = gen_rtx_MEM (mode, dest_addr); | 4224 dest = gen_rtx_MEM (mode, dest_addr); |
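The PAD_DOWNWARD branch above first allocates the whole rounded slot, then stores the value at an offset so that the padding sits below it; when the configured push code has already moved the stack pointer (POST_DEC/POST_INC), the offset compensates by one ROUNDED_SIZE. A standalone sketch of that arithmetic, with illustrative names rather than GCC internals:

    #include <assert.h>

    enum push_code { PLAIN, POST_DEC, POST_INC };

    /* Byte offset from the adjusted stack pointer at which a
       downward-padded value of MODE_SIZE bytes is stored, once
       ROUNDED_SIZE bytes have been allocated for it.  */
    static long
    pad_down_offset (long mode_size, long rounded_size,
                     int stack_grows_downward, enum push_code code)
    {
      long offset = rounded_size - mode_size; /* padding below the value */
      if (stack_grows_downward && code == POST_DEC)
        offset += rounded_size;  /* stack pointer already decremented */
      if (!stack_grows_downward && code == POST_INC)
        offset -= rounded_size;  /* stack pointer already incremented */
      return offset;
    }

    int main (void)
    {
      /* 4-byte value in an 8-byte slot, downward-growing stack.  */
      assert (pad_down_offset (4, 8, 1, PLAIN) == 4);
      assert (pad_down_offset (4, 8, 1, POST_DEC) == 12);
      return 0;
    }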
4191 /* Emit and annotate a single push insn. */ | 4240 /* Emit and annotate a single push insn. */ |
4192 | 4241 |
4193 static void | 4242 static void |
4194 emit_single_push_insn (machine_mode mode, rtx x, tree type) | 4243 emit_single_push_insn (machine_mode mode, rtx x, tree type) |
4195 { | 4244 { |
4196 int delta, old_delta = stack_pointer_delta; | 4245 poly_int64 delta, old_delta = stack_pointer_delta; |
4197 rtx_insn *prev = get_last_insn (); | 4246 rtx_insn *prev = get_last_insn (); |
4198 rtx_insn *last; | 4247 rtx_insn *last; |
4199 | 4248 |
4200 emit_single_push_insn_1 (mode, x, type); | 4249 emit_single_push_insn_1 (mode, x, type); |
4201 | 4250 |
4251 /* Adjust stack_pointer_delta to describe the situation after the push | |
4252 we just performed. Note that we must do this after the push rather | |
4253 than before the push in case calculating X needs pushes and pops of | |
4254 its own (e.g. if calling __tls_get_addr). The REG_ARGS_SIZE notes | |
4255 for such pushes and pops must not include the effect of the future | |
4256 push of X. */ | |
4257 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); | |
4258 | |
4202 last = get_last_insn (); | 4259 last = get_last_insn (); |
4203 | 4260 |
4204 /* Notice the common case where we emitted exactly one insn. */ | 4261 /* Notice the common case where we emitted exactly one insn. */ |
4205 if (PREV_INSN (last) == prev) | 4262 if (PREV_INSN (last) == prev) |
4206 { | 4263 { |
4207 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta)); | 4264 add_args_size_note (last, stack_pointer_delta); |
4208 return; | 4265 return; |
4209 } | 4266 } |
4210 | 4267 |
4211 delta = fixup_args_size_notes (prev, last, stack_pointer_delta); | 4268 delta = fixup_args_size_notes (prev, last, stack_pointer_delta); |
4212 gcc_assert (delta == INT_MIN || delta == old_delta); | 4269 gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN) |
4270 || known_eq (delta, old_delta)); | |
4213 } | 4271 } |
4214 #endif | 4272 #endif |
4215 | 4273 |
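Most of the mechanical churn in these hunks is the HOST_WIDE_INT-to-poly_int64 conversion: sizes become polynomials in a runtime-only factor (a scalable vector length), and exact comparisons become must/may predicates. A minimal two-coefficient model of what known_eq, maybe_ne, known_le and is_constant decide, assuming the factor X is nonnegative — the real poly_int is a C++ template over N coefficients, so this is only a sketch:

    #include <stdbool.h>

    /* Represents a + b*X for a nonnegative runtime-only X.  */
    struct poly { long a, b; };

    /* Equal for every possible X.  */
    static bool known_eq (struct poly p, struct poly q)
    { return p.a == q.a && p.b == q.b; }

    /* Unequal for at least one X.  */
    static bool maybe_ne (struct poly p, struct poly q)
    { return !known_eq (p, q); }

    /* p <= q for every X >= 0.  */
    static bool known_le (struct poly p, struct poly q)
    { return p.a <= q.a && p.b <= q.b; }

    /* A compile-time constant only when the runtime coefficient is
       zero; the is_constant guards below fail otherwise.  */
    static bool is_constant (struct poly p, long *out)
    {
      if (p.b != 0)
        return false;
      *out = p.a;
      return true;
    }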
4216 /* If reading SIZE bytes from X will end up reading from | 4274 /* If reading SIZE bytes from X will end up reading from |
4217 Y return the number of bytes that overlap. Return -1 | 4275 Y return the number of bytes that overlap. Return -1 |
4267 for arguments passed in registers. If nonzero, it will be the number | 4325 for arguments passed in registers. If nonzero, it will be the number |
4268 of bytes required. */ | 4326 of bytes required. */ |
4269 | 4327 |
4270 bool | 4328 bool |
4271 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size, | 4329 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size, |
4272 unsigned int align, int partial, rtx reg, int extra, | 4330 unsigned int align, int partial, rtx reg, poly_int64 extra, |
4273 rtx args_addr, rtx args_so_far, int reg_parm_stack_space, | 4331 rtx args_addr, rtx args_so_far, int reg_parm_stack_space, |
4274 rtx alignment_pad, bool sibcall_p) | 4332 rtx alignment_pad, bool sibcall_p) |
4275 { | 4333 { |
4276 rtx xinner; | 4334 rtx xinner; |
4277 pad_direction stack_direction | 4335 pad_direction stack_direction |
4310 if (mode != BLKmode) | 4368 if (mode != BLKmode) |
4311 { | 4369 { |
4312 /* A value is to be stored in an insufficiently aligned | 4370 /* A value is to be stored in an insufficiently aligned |
4313 stack slot; copy via a suitably aligned slot if | 4371 stack slot; copy via a suitably aligned slot if |
4314 necessary. */ | 4372 necessary. */ |
4315 size = GEN_INT (GET_MODE_SIZE (mode)); | 4373 size = gen_int_mode (GET_MODE_SIZE (mode), Pmode); |
4316 if (!MEM_P (xinner)) | 4374 if (!MEM_P (xinner)) |
4317 { | 4375 { |
4318 temp = assign_temp (type, 1, 1); | 4376 temp = assign_temp (type, 1, 1); |
4319 emit_move_insn (temp, xinner); | 4377 emit_move_insn (temp, xinner); |
4320 xinner = temp; | 4378 xinner = temp; |
4348 /* Here we avoid the case of a structure whose weak alignment | 4406 /* Here we avoid the case of a structure whose weak alignment |
4349 forces many pushes of a small amount of data, | 4407 forces many pushes of a small amount of data, |
4350 and such small pushes do rounding that causes trouble. */ | 4408 and such small pushes do rounding that causes trouble. */ |
4351 && ((!targetm.slow_unaligned_access (word_mode, align)) | 4409 && ((!targetm.slow_unaligned_access (word_mode, align)) |
4352 || align >= BIGGEST_ALIGNMENT | 4410 || align >= BIGGEST_ALIGNMENT |
4353 || (PUSH_ROUNDING (align / BITS_PER_UNIT) | 4411 || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT), |
4354 == (align / BITS_PER_UNIT))) | 4412 align / BITS_PER_UNIT)) |
4355 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) | 4413 && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size))) |
4356 { | 4414 { |
4357 /* Push padding now if padding above and stack grows down, | 4415 /* Push padding now if padding above and stack grows down, |
4358 or if padding below and stack grows up. | 4416 or if padding below and stack grows up. |
4359 But if space already allocated, this has already been done. */ | 4417 But if space already allocated, this has already been done. */ |
4360 if (extra && args_addr == 0 | 4418 if (maybe_ne (extra, 0) |
4361 && where_pad != PAD_NONE && where_pad != stack_direction) | 4419 && args_addr == 0 |
4362 anti_adjust_stack (GEN_INT (extra)); | 4420 && where_pad != PAD_NONE |
4421 && where_pad != stack_direction) | |
4422 anti_adjust_stack (gen_int_mode (extra, Pmode)); | |
4363 | 4423 |
4364 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0); | 4424 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0); |
4365 } | 4425 } |
4366 else | 4426 else |
4367 #endif /* PUSH_ROUNDING */ | 4427 #endif /* PUSH_ROUNDING */ |
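The condition guarding move_by_pieces above requires the target's push rounding to be a no-op for both the block's alignment and its total size, so that a sequence of small pushes lands exactly adjacent with no padding between them. A sketch with a stand-in PUSH_ROUNDING that rounds up to 4 bytes (purely illustrative, not any real target's definition):

    /* Stand-in for a target's PUSH_ROUNDING macro: round up to 4.  */
    #define PUSH_ROUND(bytes) (((bytes) + 3) & ~3UL)

    static int
    can_push_by_pieces (unsigned long size, unsigned long align_bytes)
    {
      return PUSH_ROUND (align_bytes) == align_bytes
             && PUSH_ROUND (size) == size;
    }
    /* can_push_by_pieces (12, 4) is nonzero; can_push_by_pieces (10, 4)
       is zero, because each small push would get padded to a 4-byte
       multiple and the pieces would no longer abut.  */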
4383 } | 4443 } |
4384 | 4444 |
4385 /* Get the address of the stack space. | 4445 /* Get the address of the stack space. |
4386 In this case, we do not deal with EXTRA separately. | 4446 In this case, we do not deal with EXTRA separately. |
4387 A single stack adjust will do. */ | 4447 A single stack adjust will do. */ |
4448 poly_int64 offset; | |
4388 if (! args_addr) | 4449 if (! args_addr) |
4389 { | 4450 { |
4390 temp = push_block (size, extra, where_pad == PAD_DOWNWARD); | 4451 temp = push_block (size, extra, where_pad == PAD_DOWNWARD); |
4391 extra = 0; | 4452 extra = 0; |
4392 } | 4453 } |
4393 else if (CONST_INT_P (args_so_far)) | 4454 else if (poly_int_rtx_p (args_so_far, &offset)) |
4394 temp = memory_address (BLKmode, | 4455 temp = memory_address (BLKmode, |
4395 plus_constant (Pmode, args_addr, | 4456 plus_constant (Pmode, args_addr, |
4396 skip + INTVAL (args_so_far))); | 4457 skip + offset)); |
4397 else | 4458 else |
4398 temp = memory_address (BLKmode, | 4459 temp = memory_address (BLKmode, |
4399 plus_constant (Pmode, | 4460 plus_constant (Pmode, |
4400 gen_rtx_PLUS (Pmode, | 4461 gen_rtx_PLUS (Pmode, |
4401 args_addr, | 4462 args_addr, |
4464 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); | 4525 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); |
4465 } | 4526 } |
4466 } | 4527 } |
4467 else if (partial > 0) | 4528 else if (partial > 0) |
4468 { | 4529 { |
4469 /* Scalar partly in registers. */ | 4530 /* Scalar partly in registers. This case is only supported |
4470 | 4531 for fixed-width modes. */ |
4471 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | 4532 int size = GET_MODE_SIZE (mode).to_constant (); |
4533 size /= UNITS_PER_WORD; | |
4472 int i; | 4534 int i; |
4473 int not_stack; | 4535 int not_stack; |
4474 /* # bytes of start of argument | 4536 /* # bytes of start of argument |
4475 that we must make space for but need not store. */ | 4537 that we must make space for but need not store. */ |
4476 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); | 4538 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); |
4478 int skip; | 4540 int skip; |
4479 | 4541 |
4480 /* Push padding now if padding above and stack grows down, | 4542 /* Push padding now if padding above and stack grows down, |
4481 or if padding below and stack grows up. | 4543 or if padding below and stack grows up. |
4482 But if space already allocated, this has already been done. */ | 4544 But if space already allocated, this has already been done. */ |
4483 if (extra && args_addr == 0 | 4545 if (maybe_ne (extra, 0) |
4484 && where_pad != PAD_NONE && where_pad != stack_direction) | 4546 && args_addr == 0 |
4485 anti_adjust_stack (GEN_INT (extra)); | 4547 && where_pad != PAD_NONE |
4548 && where_pad != stack_direction) | |
4549 anti_adjust_stack (gen_int_mode (extra, Pmode)); | |
4486 | 4550 |
4487 /* If we make space by pushing it, we might as well push | 4551 /* If we make space by pushing it, we might as well push |
4488 the real data. Otherwise, we can leave OFFSET nonzero | 4552 the real data. Otherwise, we can leave OFFSET nonzero |
4489 and leave the space uninitialized. */ | 4553 and leave the space uninitialized. */ |
4490 if (args_addr == 0) | 4554 if (args_addr == 0) |
4529 rtx dest; | 4593 rtx dest; |
4530 | 4594 |
4531 /* Push padding now if padding above and stack grows down, | 4595 /* Push padding now if padding above and stack grows down, |
4532 or if padding below and stack grows up. | 4596 or if padding below and stack grows up. |
4533 But if space already allocated, this has already been done. */ | 4597 But if space already allocated, this has already been done. */ |
4534 if (extra && args_addr == 0 | 4598 if (maybe_ne (extra, 0) |
4535 && where_pad != PAD_NONE && where_pad != stack_direction) | 4599 && args_addr == 0 |
4536 anti_adjust_stack (GEN_INT (extra)); | 4600 && where_pad != PAD_NONE |
4601 && where_pad != stack_direction) | |
4602 anti_adjust_stack (gen_int_mode (extra, Pmode)); | |
4537 | 4603 |
4538 #ifdef PUSH_ROUNDING | 4604 #ifdef PUSH_ROUNDING |
4539 if (args_addr == 0 && PUSH_ARGS) | 4605 if (args_addr == 0 && PUSH_ARGS) |
4540 emit_single_push_insn (mode, x, type); | 4606 emit_single_push_insn (mode, x, type); |
4541 else | 4607 else |
4576 tmp_regs[i]); | 4642 tmp_regs[i]); |
4577 | 4643 |
4578 } | 4644 } |
4579 } | 4645 } |
4580 | 4646 |
4581 if (extra && args_addr == 0 && where_pad == stack_direction) | 4647 if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction) |
4582 anti_adjust_stack (GEN_INT (extra)); | 4648 anti_adjust_stack (gen_int_mode (extra, Pmode)); |
4583 | 4649 |
4584 if (alignment_pad && args_addr == 0) | 4650 if (alignment_pad && args_addr == 0) |
4585 anti_adjust_stack (alignment_pad); | 4651 anti_adjust_stack (alignment_pad); |
4586 | 4652 |
4587 return true; | 4653 return true; |
4605 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where | 4671 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where |
4606 FIELD is a bitfield. Returns true if the optimization was successful, | 4672 FIELD is a bitfield. Returns true if the optimization was successful, |
4607 and there's nothing else to do. */ | 4673 and there's nothing else to do. */ |
4608 | 4674 |
4609 static bool | 4675 static bool |
4610 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize, | 4676 optimize_bitfield_assignment_op (poly_uint64 pbitsize, |
4611 unsigned HOST_WIDE_INT bitpos, | 4677 poly_uint64 pbitpos, |
4612 unsigned HOST_WIDE_INT bitregion_start, | 4678 poly_uint64 pbitregion_start, |
4613 unsigned HOST_WIDE_INT bitregion_end, | 4679 poly_uint64 pbitregion_end, |
4614 machine_mode mode1, rtx str_rtx, | 4680 machine_mode mode1, rtx str_rtx, |
4615 tree to, tree src, bool reverse) | 4681 tree to, tree src, bool reverse) |
4616 { | 4682 { |
4683 /* str_mode is not guaranteed to be a scalar type. */ | |
4617 machine_mode str_mode = GET_MODE (str_rtx); | 4684 machine_mode str_mode = GET_MODE (str_rtx); |
4618 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode); | 4685 unsigned int str_bitsize; |
4619 tree op0, op1; | 4686 tree op0, op1; |
4620 rtx value, result; | 4687 rtx value, result; |
4621 optab binop; | 4688 optab binop; |
4622 gimple *srcstmt; | 4689 gimple *srcstmt; |
4623 enum tree_code code; | 4690 enum tree_code code; |
4624 | 4691 |
4692 unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end; | |
4625 if (mode1 != VOIDmode | 4693 if (mode1 != VOIDmode |
4694 || !pbitsize.is_constant (&bitsize) | |
4695 || !pbitpos.is_constant (&bitpos) | |
4696 || !pbitregion_start.is_constant (&bitregion_start) | |
4697 || !pbitregion_end.is_constant (&bitregion_end) | |
4626 || bitsize >= BITS_PER_WORD | 4698 || bitsize >= BITS_PER_WORD |
4699 || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize) | |
4627 || str_bitsize > BITS_PER_WORD | 4700 || str_bitsize > BITS_PER_WORD |
4628 || TREE_SIDE_EFFECTS (to) | 4701 || TREE_SIDE_EFFECTS (to) |
4629 || TREE_THIS_VOLATILE (to)) | 4702 || TREE_THIS_VOLATILE (to)) |
4630 return false; | 4703 return false; |
4631 | 4704 |
4684 offset1 = (offset1 - bitpos) / BITS_PER_UNIT; | 4757 offset1 = (offset1 - bitpos) / BITS_PER_UNIT; |
4685 str_rtx = adjust_address (str_rtx, str_mode, offset1); | 4758 str_rtx = adjust_address (str_rtx, str_mode, offset1); |
4686 } | 4759 } |
4687 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG) | 4760 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG) |
4688 return false; | 4761 return false; |
4689 else | |
4690 gcc_assert (!reverse); | |
4691 | 4762 |
4692 /* If the bit field covers the whole REG/MEM, store_field | 4763 /* If the bit field covers the whole REG/MEM, store_field |
4693 will likely generate better code. */ | 4764 will likely generate better code. */ |
4694 if (bitsize >= str_bitsize) | 4765 if (bitsize >= str_bitsize) |
4695 return false; | 4766 return false; |
4797 | 4868 |
4798 If the access does not need to be restricted, 0 is returned in both | 4869 If the access does not need to be restricted, 0 is returned in both |
4799 *BITSTART and *BITEND. */ | 4870 *BITSTART and *BITEND. */ |
4800 | 4871 |
4801 void | 4872 void |
4802 get_bit_range (unsigned HOST_WIDE_INT *bitstart, | 4873 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp, |
4803 unsigned HOST_WIDE_INT *bitend, | 4874 poly_int64_pod *bitpos, tree *offset) |
4804 tree exp, | 4875 { |
4805 HOST_WIDE_INT *bitpos, | 4876 poly_int64 bitoffset; |
4806 tree *offset) | |
4807 { | |
4808 HOST_WIDE_INT bitoffset; | |
4809 tree field, repr; | 4877 tree field, repr; |
4810 | 4878 |
4811 gcc_assert (TREE_CODE (exp) == COMPONENT_REF); | 4879 gcc_assert (TREE_CODE (exp) == COMPONENT_REF); |
4812 | 4880 |
4813 field = TREE_OPERAND (exp, 1); | 4881 field = TREE_OPERAND (exp, 1); |
4824 part of a larger bit field, then the representative does not serve any | 4892 part of a larger bit field, then the representative does not serve any |
4825 useful purpose. This can occur in Ada. */ | 4893 useful purpose. This can occur in Ada. */ |
4826 if (handled_component_p (TREE_OPERAND (exp, 0))) | 4894 if (handled_component_p (TREE_OPERAND (exp, 0))) |
4827 { | 4895 { |
4828 machine_mode rmode; | 4896 machine_mode rmode; |
4829 HOST_WIDE_INT rbitsize, rbitpos; | 4897 poly_int64 rbitsize, rbitpos; |
4830 tree roffset; | 4898 tree roffset; |
4831 int unsignedp, reversep, volatilep = 0; | 4899 int unsignedp, reversep, volatilep = 0; |
4832 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos, | 4900 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos, |
4833 &roffset, &rmode, &unsignedp, &reversep, | 4901 &roffset, &rmode, &unsignedp, &reversep, |
4834 &volatilep); | 4902 &volatilep); |
4835 if ((rbitpos % BITS_PER_UNIT) != 0) | 4903 if (!multiple_p (rbitpos, BITS_PER_UNIT)) |
4836 { | 4904 { |
4837 *bitstart = *bitend = 0; | 4905 *bitstart = *bitend = 0; |
4838 return; | 4906 return; |
4839 } | 4907 } |
4840 } | 4908 } |
4841 | 4909 |
4842 /* Compute the adjustment to bitpos from the offset of the field | 4910 /* Compute the adjustment to bitpos from the offset of the field |
4843 relative to the representative. DECL_FIELD_OFFSET of field and | 4911 relative to the representative. DECL_FIELD_OFFSET of field and |
4844 repr are the same by construction if they are not constants, | 4912 repr are the same by construction if they are not constants, |
4845 see finish_bitfield_layout. */ | 4913 see finish_bitfield_layout. */ |
4846 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)) | 4914 poly_uint64 field_offset, repr_offset; |
4847 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))) | 4915 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset) |
4848 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field)) | 4916 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset)) |
4849 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT; | 4917 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT; |
4850 else | 4918 else |
4851 bitoffset = 0; | 4919 bitoffset = 0; |
4852 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | 4920 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
4853 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); | 4921 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); |
4854 | 4922 |
4855 /* If the adjustment is larger than bitpos, we would have a negative bit | 4923 /* If the adjustment is larger than bitpos, we would have a negative bit |
4856 position for the lower bound and this may wreak havoc later. Adjust | 4924 position for the lower bound and this may wreak havoc later. Adjust |
4857 offset and bitpos to make the lower bound non-negative in that case. */ | 4925 offset and bitpos to make the lower bound non-negative in that case. */ |
4858 if (bitoffset > *bitpos) | 4926 if (maybe_gt (bitoffset, *bitpos)) |
4859 { | 4927 { |
4860 HOST_WIDE_INT adjust = bitoffset - *bitpos; | 4928 poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos; |
4861 gcc_assert ((adjust % BITS_PER_UNIT) == 0); | 4929 poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT); |
4862 | 4930 |
4863 *bitpos += adjust; | 4931 *bitpos += adjust_bits; |
4864 if (*offset == NULL_TREE) | 4932 if (*offset == NULL_TREE) |
4865 *offset = size_int (-adjust / BITS_PER_UNIT); | 4933 *offset = size_int (-adjust_bytes); |
4866 else | 4934 else |
4867 *offset | 4935 *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes)); |
4868 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT)); | |
4869 *bitstart = 0; | 4936 *bitstart = 0; |
4870 } | 4937 } |
4871 else | 4938 else |
4872 *bitstart = *bitpos - bitoffset; | 4939 *bitstart = *bitpos - bitoffset; |
4873 | 4940 |
4874 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1; | 4941 *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1; |
4875 } | 4942 } |
4876 | 4943 |
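For the common case where all offsets are compile-time constants, the computation above reduces to a few lines: take the bit distance from the bitfield's representative to the field, rebase BITPOS onto the representative, and let the representative's size bound the accessible region. A standalone sketch (constant offsets only; the negative-lower-bound adjustment path is omitted):

    #define BITS_PER_UNIT 8

    /* Compute the [*BITSTART, *BITEND] window, relative to the same
       origin as BITPOS, that a store to the bitfield may touch.  */
    static void
    bit_range (long field_byte_off, long field_bit_off,
               long repr_byte_off, long repr_bit_off,
               long repr_size_bits, long bitpos,
               long *bitstart, long *bitend)
    {
      long bitoffset = (field_byte_off - repr_byte_off) * BITS_PER_UNIT
                       + (field_bit_off - repr_bit_off);
      *bitstart = bitpos - bitoffset;  /* assumes bitoffset <= bitpos */
      *bitend = *bitstart + repr_size_bits - 1;
    }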
4877 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside | 4944 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside |
4878 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if | 4945 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if |
4879 DECL_RTL was not set yet, return NORTL. */ | 4946 DECL_RTL was not set yet, return NORTL. */ |
4976 && (REF_REVERSE_STORAGE_ORDER (to) | 5043 && (REF_REVERSE_STORAGE_ORDER (to) |
4977 || mem_ref_refers_to_non_mem_p (to))) | 5044 || mem_ref_refers_to_non_mem_p (to))) |
4978 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE) | 5045 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE) |
4979 { | 5046 { |
4980 machine_mode mode1; | 5047 machine_mode mode1; |
4981 HOST_WIDE_INT bitsize, bitpos; | 5048 poly_int64 bitsize, bitpos; |
4982 unsigned HOST_WIDE_INT bitregion_start = 0; | 5049 poly_uint64 bitregion_start = 0; |
4983 unsigned HOST_WIDE_INT bitregion_end = 0; | 5050 poly_uint64 bitregion_end = 0; |
4984 tree offset; | 5051 tree offset; |
4985 int unsignedp, reversep, volatilep = 0; | 5052 int unsignedp, reversep, volatilep = 0; |
4986 tree tem; | 5053 tree tem; |
4987 | 5054 |
4988 push_temp_slots (); | 5055 push_temp_slots (); |
4989 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, | 5056 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
4990 &unsignedp, &reversep, &volatilep); | 5057 &unsignedp, &reversep, &volatilep); |
4991 | 5058 |
4992 /* Make sure bitpos is not negative, it can wreak havoc later. */ | 5059 /* Make sure bitpos is not negative, it can wreak havoc later. */ |
4993 if (bitpos < 0) | 5060 if (maybe_lt (bitpos, 0)) |
4994 { | 5061 { |
4995 gcc_assert (offset == NULL_TREE); | 5062 gcc_assert (offset == NULL_TREE); |
4996 offset = size_int (bitpos >> LOG2_BITS_PER_UNIT); | 5063 offset = size_int (bits_to_bytes_round_down (bitpos)); |
4997 bitpos &= BITS_PER_UNIT - 1; | 5064 bitpos = num_trailing_bits (bitpos); |
4998 } | 5065 } |
4999 | 5066 |
5000 if (TREE_CODE (to) == COMPONENT_REF | 5067 if (TREE_CODE (to) == COMPONENT_REF |
5001 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1))) | 5068 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1))) |
5002 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset); | 5069 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset); |
5003 /* The C++ memory model naturally applies to byte-aligned fields. | 5070 /* The C++ memory model naturally applies to byte-aligned fields. |
5004 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or | 5071 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or |
5005 BITSIZE are not byte-aligned, there is no need to limit the range | 5072 BITSIZE are not byte-aligned, there is no need to limit the range |
5006 we can access. This can occur with packed structures in Ada. */ | 5073 we can access. This can occur with packed structures in Ada. */ |
5007 else if (bitsize > 0 | 5074 else if (maybe_gt (bitsize, 0) |
5008 && bitsize % BITS_PER_UNIT == 0 | 5075 && multiple_p (bitsize, BITS_PER_UNIT) |
5009 && bitpos % BITS_PER_UNIT == 0) | 5076 && multiple_p (bitpos, BITS_PER_UNIT)) |
5010 { | 5077 { |
5011 bitregion_start = bitpos; | 5078 bitregion_start = bitpos; |
5012 bitregion_end = bitpos + bitsize - 1; | 5079 bitregion_end = bitpos + bitsize - 1; |
5013 } | 5080 } |
5014 | 5081 |
5066 r124:DI=r123:DI+0x10 | 5133 r124:DI=r123:DI+0x10 |
5067 [r124:DI+0x8]=r121:DI | 5134 [r124:DI+0x8]=r121:DI |
5068 | 5135 |
5069 This is only done for aligned data values, as these can | 5136 This is only done for aligned data values, as these can |
5070 be expected to result in single move instructions. */ | 5137 be expected to result in single move instructions. */ |
5138 poly_int64 bytepos; | |
5071 if (mode1 != VOIDmode | 5139 if (mode1 != VOIDmode |
5072 && bitpos != 0 | 5140 && maybe_ne (bitpos, 0) |
5073 && bitsize > 0 | 5141 && maybe_gt (bitsize, 0) |
5074 && (bitpos % bitsize) == 0 | 5142 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos) |
5075 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 | 5143 && multiple_p (bitpos, bitsize) |
5144 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1)) | |
5076 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1)) | 5145 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1)) |
5077 { | 5146 { |
5078 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); | 5147 to_rtx = adjust_address (to_rtx, mode1, bytepos); |
5079 bitregion_start = 0; | 5148 bitregion_start = 0; |
5080 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos) | 5149 if (known_ge (bitregion_end, poly_uint64 (bitpos))) |
5081 bitregion_end -= bitpos; | 5150 bitregion_end -= bitpos; |
5082 bitpos = 0; | 5151 bitpos = 0; |
5083 } | 5152 } |
5084 | 5153 |
5085 to_rtx = offset_address (to_rtx, offset_rtx, | 5154 to_rtx = offset_address (to_rtx, offset_rtx, |
5090 /* No action is needed if the target is not a memory and the field | 5159 /* No action is needed if the target is not a memory and the field |
5091 lies completely outside that target. This can occur if the source | 5160 lies completely outside that target. This can occur if the source |
5092 code contains an out-of-bounds access to a small array. */ | 5161 code contains an out-of-bounds access to a small array. */ |
5093 if (!MEM_P (to_rtx) | 5162 if (!MEM_P (to_rtx) |
5094 && GET_MODE (to_rtx) != BLKmode | 5163 && GET_MODE (to_rtx) != BLKmode |
5095 && (unsigned HOST_WIDE_INT) bitpos | 5164 && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx)))) |
5096 >= GET_MODE_PRECISION (GET_MODE (to_rtx))) | |
5097 { | 5165 { |
5098 expand_normal (from); | 5166 expand_normal (from); |
5099 result = NULL; | 5167 result = NULL; |
5100 } | 5168 } |
5101 /* Handle expand_expr of a complex value returning a CONCAT. */ | 5169 /* Handle expand_expr of a complex value returning a CONCAT. */ |
5102 else if (GET_CODE (to_rtx) == CONCAT) | 5170 else if (GET_CODE (to_rtx) == CONCAT) |
5103 { | 5171 { |
5104 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx)); | 5172 machine_mode to_mode = GET_MODE (to_rtx); |
5105 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))) | 5173 gcc_checking_assert (COMPLEX_MODE_P (to_mode)); |
5106 && bitpos == 0 | 5174 poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode); |
5107 && bitsize == mode_bitsize) | 5175 unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode); |
5176 if (TYPE_MODE (TREE_TYPE (from)) == to_mode | |
5177 && known_eq (bitpos, 0) | |
5178 && known_eq (bitsize, mode_bitsize)) | |
5108 result = store_expr (from, to_rtx, false, nontemporal, reversep); | 5179 result = store_expr (from, to_rtx, false, nontemporal, reversep); |
5109 else if (bitsize == mode_bitsize / 2 | 5180 else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode) |
5110 && (bitpos == 0 || bitpos == mode_bitsize / 2)) | 5181 && known_eq (bitsize, inner_bitsize) |
5111 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false, | 5182 && (known_eq (bitpos, 0) |
5112 nontemporal, reversep); | 5183 || known_eq (bitpos, inner_bitsize))) |
5113 else if (bitpos + bitsize <= mode_bitsize / 2) | 5184 result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)), |
5185 false, nontemporal, reversep); | |
5186 else if (known_le (bitpos + bitsize, inner_bitsize)) | |
5114 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos, | 5187 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos, |
5115 bitregion_start, bitregion_end, | 5188 bitregion_start, bitregion_end, |
5116 mode1, from, get_alias_set (to), | 5189 mode1, from, get_alias_set (to), |
5117 nontemporal, reversep); | 5190 nontemporal, reversep); |
5118 else if (bitpos >= mode_bitsize / 2) | 5191 else if (known_ge (bitpos, inner_bitsize)) |
5119 result = store_field (XEXP (to_rtx, 1), bitsize, | 5192 result = store_field (XEXP (to_rtx, 1), bitsize, |
5120 bitpos - mode_bitsize / 2, | 5193 bitpos - inner_bitsize, |
5121 bitregion_start, bitregion_end, | 5194 bitregion_start, bitregion_end, |
5122 mode1, from, get_alias_set (to), | 5195 mode1, from, get_alias_set (to), |
5123 nontemporal, reversep); | 5196 nontemporal, reversep); |
5124 else if (bitpos == 0 && bitsize == mode_bitsize) | 5197 else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize)) |
5125 { | 5198 { |
5126 rtx from_rtx; | |
5127 result = expand_normal (from); | 5199 result = expand_normal (from); |
5128 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result, | 5200 if (GET_CODE (result) == CONCAT) |
5129 TYPE_MODE (TREE_TYPE (from)), 0); | 5201 { |
5130 emit_move_insn (XEXP (to_rtx, 0), | 5202 to_mode = GET_MODE_INNER (to_mode); |
5131 read_complex_part (from_rtx, false)); | 5203 machine_mode from_mode = GET_MODE_INNER (GET_MODE (result)); |
5132 emit_move_insn (XEXP (to_rtx, 1), | 5204 rtx from_real |
5133 read_complex_part (from_rtx, true)); | 5205 = simplify_gen_subreg (to_mode, XEXP (result, 0), |
5206 from_mode, 0); | |
5207 rtx from_imag | |
5208 = simplify_gen_subreg (to_mode, XEXP (result, 1), | |
5209 from_mode, 0); | |
5210 if (!from_real || !from_imag) | |
5211 goto concat_store_slow; | |
5212 emit_move_insn (XEXP (to_rtx, 0), from_real); | |
5213 emit_move_insn (XEXP (to_rtx, 1), from_imag); | |
5214 } | |
5215 else | |
5216 { | |
5217 rtx from_rtx | |
5218 = simplify_gen_subreg (to_mode, result, | |
5219 TYPE_MODE (TREE_TYPE (from)), 0); | |
5220 if (from_rtx) | |
5221 { | |
5222 emit_move_insn (XEXP (to_rtx, 0), | |
5223 read_complex_part (from_rtx, false)); | |
5224 emit_move_insn (XEXP (to_rtx, 1), | |
5225 read_complex_part (from_rtx, true)); | |
5226 } | |
5227 else | |
5228 { | |
5229 machine_mode to_mode | |
5230 = GET_MODE_INNER (GET_MODE (to_rtx)); | |
5231 rtx from_real | |
5232 = simplify_gen_subreg (to_mode, result, | |
5233 TYPE_MODE (TREE_TYPE (from)), | |
5234 0); | |
5235 rtx from_imag | |
5236 = simplify_gen_subreg (to_mode, result, | |
5237 TYPE_MODE (TREE_TYPE (from)), | |
5238 GET_MODE_SIZE (to_mode)); | |
5239 if (!from_real || !from_imag) | |
5240 goto concat_store_slow; | |
5241 emit_move_insn (XEXP (to_rtx, 0), from_real); | |
5242 emit_move_insn (XEXP (to_rtx, 1), from_imag); | |
5243 } | |
5244 } | |
5134 } | 5245 } |
5135 else | 5246 else |
5136 { | 5247 { |
5137 rtx temp = assign_stack_temp (GET_MODE (to_rtx), | 5248 concat_store_slow:; |
5249 rtx temp = assign_stack_temp (to_mode, | |
5138 GET_MODE_SIZE (GET_MODE (to_rtx))); | 5250 GET_MODE_SIZE (GET_MODE (to_rtx))); |
5139 write_complex_part (temp, XEXP (to_rtx, 0), false); | 5251 write_complex_part (temp, XEXP (to_rtx, 0), false); |
5140 write_complex_part (temp, XEXP (to_rtx, 1), true); | 5252 write_complex_part (temp, XEXP (to_rtx, 1), true); |
5141 result = store_field (temp, bitsize, bitpos, | 5253 result = store_field (temp, bitsize, bitpos, |
5142 bitregion_start, bitregion_end, | 5254 bitregion_start, bitregion_end, |
5156 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); | 5268 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); |
5157 if (volatilep) | 5269 if (volatilep) |
5158 MEM_VOLATILE_P (to_rtx) = 1; | 5270 MEM_VOLATILE_P (to_rtx) = 1; |
5159 } | 5271 } |
5160 | 5272 |
5273 gcc_checking_assert (known_ge (bitpos, 0)); | |
5161 if (optimize_bitfield_assignment_op (bitsize, bitpos, | 5274 if (optimize_bitfield_assignment_op (bitsize, bitpos, |
5162 bitregion_start, bitregion_end, | 5275 bitregion_start, bitregion_end, |
5163 mode1, to_rtx, to, from, | 5276 mode1, to_rtx, to, from, |
5164 reversep)) | 5277 reversep)) |
5165 result = NULL; | 5278 result = NULL; |
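The CONCAT block above is essentially a dispatch on where the store falls within the two halves of a complex value. Simplified to constant positions, with INNER the bitsize of one part (the real code additionally distinguishes same-mode stores and the subreg fallbacks that reach concat_store_slow):

    enum concat_store
    { WHOLE, REAL_PART, IMAG_PART, IN_REAL, IN_IMAG, SLOW };

    static enum concat_store
    classify_concat_store (long bitpos, long bitsize, long inner)
    {
      if (bitpos == 0 && bitsize == 2 * inner)
        return WHOLE;                 /* store the full complex value */
      if (bitsize == inner && (bitpos == 0 || bitpos == inner))
        return bitpos ? IMAG_PART : REAL_PART;
      if (bitpos + bitsize <= inner)
        return IN_REAL;               /* store_field into the real half */
      if (bitpos >= inner)
        return IN_IMAG;               /* rebased by INNER in the code above */
      return SLOW;   /* straddles both halves: spill to a stack temp */
    }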
5194 || TREE_CODE (to) == RESULT_DECL) | 5307 || TREE_CODE (to) == RESULT_DECL) |
5195 && REG_P (DECL_RTL (to))) | 5308 && REG_P (DECL_RTL (to))) |
5196 || TREE_CODE (to) == SSA_NAME)) | 5309 || TREE_CODE (to) == SSA_NAME)) |
5197 { | 5310 { |
5198 rtx value; | 5311 rtx value; |
5199 rtx bounds; | |
5200 | 5312 |
5201 push_temp_slots (); | 5313 push_temp_slots (); |
5202 value = expand_normal (from); | 5314 value = expand_normal (from); |
5203 | |
5204 /* Split value and bounds to store them separately. */ | |
5205 chkp_split_slot (value, &value, &bounds); | |
5206 | 5315 |
5207 if (to_rtx == 0) | 5316 if (to_rtx == 0) |
5208 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); | 5317 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
5209 | 5318 |
5210 /* Handle calls that return values in multiple non-contiguous locations. | 5319 /* Handle calls that return values in multiple non-contiguous locations. |
5234 value = convert_memory_address_addr_space | 5343 value = convert_memory_address_addr_space |
5235 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value, | 5344 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value, |
5236 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to)))); | 5345 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to)))); |
5237 | 5346 |
5238 emit_move_insn (to_rtx, value); | 5347 emit_move_insn (to_rtx, value); |
5239 } | |
5240 | |
5241 /* Store bounds if required. */ | |
5242 if (bounds | |
5243 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to)))) | |
5244 { | |
5245 gcc_assert (MEM_P (to_rtx)); | |
5246 chkp_emit_bounds_store (bounds, value, to_rtx); | |
5247 } | 5348 } |
5248 | 5349 |
5249 preserve_temp_slots (to_rtx); | 5350 preserve_temp_slots (to_rtx); |
5250 pop_temp_slots (); | 5351 pop_temp_slots (); |
5251 return; | 5352 return; |
5314 } | 5415 } |
5315 | 5416 |
5316 /* Compute FROM and store the value in the rtx we got. */ | 5417 /* Compute FROM and store the value in the rtx we got. */ |
5317 | 5418 |
5318 push_temp_slots (); | 5419 push_temp_slots (); |
5319 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to); | 5420 result = store_expr (from, to_rtx, 0, nontemporal, false); |
5320 preserve_temp_slots (result); | 5421 preserve_temp_slots (result); |
5321 pop_temp_slots (); | 5422 pop_temp_slots (); |
5322 return; | 5423 return; |
5323 } | 5424 } |
5324 | 5425 |
5353 If CALL_PARAM_P is nonzero, this is a store into a call param on the | 5454 If CALL_PARAM_P is nonzero, this is a store into a call param on the |
5354 stack, and block moves may need to be treated specially. | 5455 stack, and block moves may need to be treated specially. |
5355 | 5456 |
5356 If NONTEMPORAL is true, try using a nontemporal store instruction. | 5457 If NONTEMPORAL is true, try using a nontemporal store instruction. |
5357 | 5458 |
5358 If REVERSE is true, the store is to be done in reverse order. | 5459 If REVERSE is true, the store is to be done in reverse order. */ |
5359 | |
5360 If BTARGET is not NULL then computed bounds of EXP are | |
5361 associated with BTARGET. */ | |
5362 | 5460 |
5363 rtx | 5461 rtx |
5364 store_expr_with_bounds (tree exp, rtx target, int call_param_p, | 5462 store_expr (tree exp, rtx target, int call_param_p, |
5365 bool nontemporal, bool reverse, tree btarget) | 5463 bool nontemporal, bool reverse) |
5366 { | 5464 { |
5367 rtx temp; | 5465 rtx temp; |
5368 rtx alt_rtl = NULL_RTX; | 5466 rtx alt_rtl = NULL_RTX; |
5369 location_t loc = curr_insn_location (); | 5467 location_t loc = curr_insn_location (); |
5370 | 5468 |
5381 { | 5479 { |
5382 /* Perform first part of compound expression, then assign from second | 5480 /* Perform first part of compound expression, then assign from second |
5383 part. */ | 5481 part. */ |
5384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, | 5482 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
5385 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); | 5483 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); |
5386 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target, | 5484 return store_expr (TREE_OPERAND (exp, 1), target, |
5387 call_param_p, nontemporal, reverse, | 5485 call_param_p, nontemporal, reverse); |
5388 btarget); | |
5389 } | 5486 } |
5390 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) | 5487 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) |
5391 { | 5488 { |
5392 /* For conditional expression, get safe form of the target. Then | 5489 /* For conditional expression, get safe form of the target. Then |
5393 test the condition, doing the appropriate assignment on either | 5490 test the condition, doing the appropriate assignment on either |
5398 | 5495 |
5399 do_pending_stack_adjust (); | 5496 do_pending_stack_adjust (); |
5400 NO_DEFER_POP; | 5497 NO_DEFER_POP; |
5401 jumpifnot (TREE_OPERAND (exp, 0), lab1, | 5498 jumpifnot (TREE_OPERAND (exp, 0), lab1, |
5402 profile_probability::uninitialized ()); | 5499 profile_probability::uninitialized ()); |
5403 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p, | 5500 store_expr (TREE_OPERAND (exp, 1), target, call_param_p, |
5404 nontemporal, reverse, btarget); | 5501 nontemporal, reverse); |
5405 emit_jump_insn (targetm.gen_jump (lab2)); | 5502 emit_jump_insn (targetm.gen_jump (lab2)); |
5406 emit_barrier (); | 5503 emit_barrier (); |
5407 emit_label (lab1); | 5504 emit_label (lab1); |
5408 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p, | 5505 store_expr (TREE_OPERAND (exp, 2), target, call_param_p, |
5409 nontemporal, reverse, btarget); | 5506 nontemporal, reverse); |
5410 emit_label (lab2); | 5507 emit_label (lab2); |
5411 OK_DEFER_POP; | 5508 OK_DEFER_POP; |
5412 | 5509 |
5413 return NULL_RTX; | 5510 return NULL_RTX; |
5414 } | 5511 } |
5457 } | 5554 } |
5458 | 5555 |
5459 temp = expand_expr (exp, inner_target, VOIDmode, | 5556 temp = expand_expr (exp, inner_target, VOIDmode, |
5460 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); | 5557 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); |
5461 | 5558 |
5462 /* Handle bounds returned by call. */ | |
5463 if (TREE_CODE (exp) == CALL_EXPR) | |
5464 { | |
5465 rtx bounds; | |
5466 chkp_split_slot (temp, &temp, &bounds); | |
5467 if (bounds && btarget) | |
5468 { | |
5469 gcc_assert (TREE_CODE (btarget) == SSA_NAME); | |
5470 rtx tmp = targetm.calls.load_returned_bounds (bounds); | |
5471 chkp_set_rtl_bounds (btarget, tmp); | |
5472 } | |
5473 } | |
5474 | 5559 |
5475 /* If TEMP is a VOIDmode constant, use convert_modes to make | 5560 /* If TEMP is a VOIDmode constant, use convert_modes to make |
5476 sure that we properly convert it. */ | 5561 sure that we properly convert it. */ |
5477 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | 5562 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) |
5478 { | 5563 { |
5550 tmp_target = nontemporal || reverse ? NULL_RTX : target; | 5635 tmp_target = nontemporal || reverse ? NULL_RTX : target; |
5551 temp = expand_expr_real (exp, tmp_target, GET_MODE (target), | 5636 temp = expand_expr_real (exp, tmp_target, GET_MODE (target), |
5552 (call_param_p | 5637 (call_param_p |
5553 ? EXPAND_STACK_PARM : EXPAND_NORMAL), | 5638 ? EXPAND_STACK_PARM : EXPAND_NORMAL), |
5554 &alt_rtl, false); | 5639 &alt_rtl, false); |
5555 | |
5556 /* Handle bounds returned by call. */ | |
5557 if (TREE_CODE (exp) == CALL_EXPR) | |
5558 { | |
5559 rtx bounds; | |
5560 chkp_split_slot (temp, &temp, &bounds); | |
5561 if (bounds && btarget) | |
5562 { | |
5563 gcc_assert (TREE_CODE (btarget) == SSA_NAME); | |
5564 rtx tmp = targetm.calls.load_returned_bounds (bounds); | |
5565 chkp_set_rtl_bounds (btarget, tmp); | |
5566 } | |
5567 } | |
5568 } | 5640 } |
5569 | 5641 |
5570 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not | 5642 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not |
5571 the same as that of TARGET, adjust the constant. This is needed, for | 5643 the same as that of TARGET, adjust the constant. This is needed, for |
5572 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want | 5644 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want |
5573 only a word-sized value. */ | 5645 only a word-sized value. */ |
5574 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode | 5646 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode |
5575 && TREE_CODE (exp) != ERROR_MARK | 5647 && TREE_CODE (exp) != ERROR_MARK |
5576 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) | 5648 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
5577 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | 5649 { |
5578 temp, TYPE_UNSIGNED (TREE_TYPE (exp))); | 5650 if (GET_MODE_CLASS (GET_MODE (target)) |
5651 != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) | |
5652 && known_eq (GET_MODE_BITSIZE (GET_MODE (target)), | |
5653 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))))) | |
5654 { | |
5655 rtx t = simplify_gen_subreg (GET_MODE (target), temp, | |
5656 TYPE_MODE (TREE_TYPE (exp)), 0); | |
5657 if (t) | |
5658 temp = t; | |
5659 } | |
5660 if (GET_MODE (temp) == VOIDmode) | |
5661 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
5662 temp, TYPE_UNSIGNED (TREE_TYPE (exp))); | |
5663 } | |
5579 | 5664 |
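The new branch above handles a constant whose type mode differs from the target mode in class but not in size — for instance an integer constant destined for a float register: it is reinterpreted bit-for-bit via a subreg rather than converted numerically. The idea in plain C, assuming a 32-bit float (purely illustrative):

    #include <stdint.h>
    #include <string.h>

    /* Bit-for-bit reinterpretation, the moral equivalent of the
       simplify_gen_subreg call above; a numeric int-to-float
       conversion would produce a different bit pattern.  */
    static float
    bits_to_float (uint32_t bits)
    {
      float f;
      memcpy (&f, &bits, sizeof f);
      return f;
    }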
5580 /* If value was not generated in the target, store it there. | 5665 /* If value was not generated in the target, store it there. |
5581 Convert the value to TARGET's type first if necessary and emit the | 5666 Convert the value to TARGET's type first if necessary and emit the |
5582 pending incrementations that have been queued when expanding EXP. | 5667 pending incrementations that have been queued when expanding EXP. |
5583 Note that we cannot emit the whole queue blindly because this will | 5668 Note that we cannot emit the whole queue blindly because this will |
5665 (call_param_p | 5750 (call_param_p |
5666 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | 5751 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); |
5667 | 5752 |
5668 /* Figure out how much is left in TARGET that we have to clear. | 5753 /* Figure out how much is left in TARGET that we have to clear. |
5669 Do all calculations in pointer_mode. */ | 5754 Do all calculations in pointer_mode. */ |
5670 if (CONST_INT_P (copy_size_rtx)) | 5755 poly_int64 const_copy_size; |
5756 if (poly_int_rtx_p (copy_size_rtx, &const_copy_size)) | |
5671 { | 5757 { |
5672 size = plus_constant (address_mode, size, | 5758 size = plus_constant (address_mode, size, -const_copy_size); |
5673 -INTVAL (copy_size_rtx)); | 5759 target = adjust_address (target, BLKmode, const_copy_size); |
5674 target = adjust_address (target, BLKmode, | |
5675 INTVAL (copy_size_rtx)); | |
5676 } | 5760 } |
5677 else | 5761 else |
5678 { | 5762 { |
5679 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, | 5763 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, |
5680 copy_size_rtx, NULL_RTX, 0, | 5764 copy_size_rtx, NULL_RTX, 0, |
5728 emit_move_insn (target, temp); | 5812 emit_move_insn (target, temp); |
5729 } | 5813 } |
5730 } | 5814 } |
5731 | 5815 |
5732 return NULL_RTX; | 5816 return NULL_RTX; |
5733 } | |
5734 | |
5735 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */ | |
5736 rtx | |
5737 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal, | |
5738 bool reverse) | |
5739 { | |
5740 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, | |
5741 reverse, NULL); | |
5742 } | 5817 } |
5743 | 5818 |
5744 /* Return true if field F of structure TYPE is a flexible array. */ | 5819 /* Return true if field F of structure TYPE is a flexible array. */ |
5745 | 5820 |
5746 static bool | 5821 static bool |
5836 | 5911 |
5837 case COMPLEX_TYPE: | 5912 case COMPLEX_TYPE: |
5838 return 2; | 5913 return 2; |
5839 | 5914 |
5840 case VECTOR_TYPE: | 5915 case VECTOR_TYPE: |
5841 return TYPE_VECTOR_SUBPARTS (type); | 5916 { |
5917 unsigned HOST_WIDE_INT nelts; | |
5918 if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts)) | |
5919 return nelts; | |
5920 else | |
5921 return -1; | |
5922 } | |
5842 | 5923 |
5843 case INTEGER_TYPE: | 5924 case INTEGER_TYPE: |
5844 case REAL_TYPE: | 5925 case REAL_TYPE: |
5845 case FIXED_POINT_TYPE: | 5926 case FIXED_POINT_TYPE: |
5846 case ENUMERAL_TYPE: | 5927 case ENUMERAL_TYPE: |
5938 init_elts += mult; | 6019 init_elts += mult; |
5939 break; | 6020 break; |
5940 | 6021 |
5941 case VECTOR_CST: | 6022 case VECTOR_CST: |
5942 { | 6023 { |
5943 unsigned i; | 6024 /* We can only construct constant-length vectors using |
5944 for (i = 0; i < VECTOR_CST_NELTS (value); ++i) | 6025 CONSTRUCTOR. */ |
6026 unsigned int nunits = VECTOR_CST_NELTS (value).to_constant (); | |
6027 for (unsigned int i = 0; i < nunits; ++i) | |
5945 { | 6028 { |
5946 tree v = VECTOR_CST_ELT (value, i); | 6029 tree v = VECTOR_CST_ELT (value, i); |
5947 if (!initializer_zerop (v)) | 6030 if (!initializer_zerop (v)) |
5948 nz_elts += mult; | 6031 nz_elts += mult; |
5949 init_elts += mult; | 6032 init_elts += mult; |
6074 necessary to go through store_field. This is so that we can pass through | 6157 necessary to go through store_field. This is so that we can pass through |
6075 the cleared field to let store_constructor know that we may not have to | 6158 the cleared field to let store_constructor know that we may not have to |
6076 clear a substructure if the outer structure has already been cleared. */ | 6159 clear a substructure if the outer structure has already been cleared. */ |
6077 | 6160 |
6078 static void | 6161 static void |
6079 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, | 6162 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos, |
6080 HOST_WIDE_INT bitpos, | 6163 poly_uint64 bitregion_start, |
6081 unsigned HOST_WIDE_INT bitregion_start, | 6164 poly_uint64 bitregion_end, |
6082 unsigned HOST_WIDE_INT bitregion_end, | |
6083 machine_mode mode, | 6165 machine_mode mode, |
6084 tree exp, int cleared, | 6166 tree exp, int cleared, |
6085 alias_set_type alias_set, bool reverse) | 6167 alias_set_type alias_set, bool reverse) |
6086 { | 6168 { |
6169 poly_int64 bytepos; | |
6170 poly_uint64 bytesize; | |
6087 if (TREE_CODE (exp) == CONSTRUCTOR | 6171 if (TREE_CODE (exp) == CONSTRUCTOR |
6088 /* We can only call store_constructor recursively if the size and | 6172 /* We can only call store_constructor recursively if the size and |
6089 bit position are on a byte boundary. */ | 6173 bit position are on a byte boundary. */ |
6090 && bitpos % BITS_PER_UNIT == 0 | 6174 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos) |
6091 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0) | 6175 && maybe_ne (bitsize, 0U) |
6176 && multiple_p (bitsize, BITS_PER_UNIT, &bytesize) | |
6092 /* If we have a nonzero bitpos for a register target, then we just | 6177 /* If we have a nonzero bitpos for a register target, then we just |
6093 let store_field do the bitfield handling. This is unlikely to | 6178 let store_field do the bitfield handling. This is unlikely to |
6094 generate unnecessary clear instructions anyways. */ | 6179 generate unnecessary clear instructions anyways. */ |
6095 && (bitpos == 0 || MEM_P (target))) | 6180 && (known_eq (bitpos, 0) || MEM_P (target))) |
6096 { | 6181 { |
6097 if (MEM_P (target)) | 6182 if (MEM_P (target)) |
6098 target | 6183 { |
6099 = adjust_address (target, | 6184 machine_mode target_mode = GET_MODE (target); |
6100 GET_MODE (target) == BLKmode | 6185 if (target_mode != BLKmode |
6101 || 0 != (bitpos | 6186 && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode))) |
6102 % GET_MODE_ALIGNMENT (GET_MODE (target))) | 6187 target_mode = BLKmode; |
6103 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); | 6188 target = adjust_address (target, target_mode, bytepos); |
6189 } | |
6104 | 6190 |
6105 | 6191 |
6106 /* Update the alias set, if required. */ | 6192 /* Update the alias set, if required. */ |
6107 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target) | 6193 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target) |
6108 && MEM_ALIAS_SET (target) != 0) | 6194 && MEM_ALIAS_SET (target) != 0) |
6109 { | 6195 { |
6110 target = copy_rtx (target); | 6196 target = copy_rtx (target); |
6111 set_mem_alias_set (target, alias_set); | 6197 set_mem_alias_set (target, alias_set); |
6112 } | 6198 } |
6113 | 6199 |
6114 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT, | 6200 store_constructor (exp, target, cleared, bytesize, reverse); |
6115 reverse); | |
6116 } | 6201 } |
6117 else | 6202 else |
6118 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode, | 6203 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode, |
6119 exp, alias_set, false, reverse); | 6204 exp, alias_set, false, reverse); |
6120 } | 6205 } |
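In the constant case, the guard at the top of this function reduces to a byte-boundary test plus the register restriction spelled out in the comments. A scalar sketch:

    /* Can a nested CONSTRUCTOR at BITPOS/BITSIZE be handed back to
       store_constructor, rather than going through store_field?  */
    static int
    can_recurse_into_constructor (long bitpos, long bitsize,
                                  int target_is_mem)
    {
      return bitpos % 8 == 0
             && bitsize > 0 && bitsize % 8 == 0
             /* Nonzero BITPOS in a register: let store_field do it.  */
             && (bitpos == 0 || target_is_mem);
    }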
6144 may not be the same as the size of EXP if we are assigning to a field | 6229 may not be the same as the size of EXP if we are assigning to a field |
6145 which has been packed to exclude padding bits. | 6230 which has been packed to exclude padding bits. |
6146 If REVERSE is true, the store is to be done in reverse order. */ | 6231 If REVERSE is true, the store is to be done in reverse order. */ |
6147 | 6232 |
6148 static void | 6233 static void |
6149 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size, | 6234 store_constructor (tree exp, rtx target, int cleared, poly_int64 size, |
6150 bool reverse) | 6235 bool reverse) |
6151 { | 6236 { |
6152 tree type = TREE_TYPE (exp); | 6237 tree type = TREE_TYPE (exp); |
6153 HOST_WIDE_INT exp_size = int_size_in_bytes (type); | 6238 HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
6154 HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0; | 6239 poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0; |
6155 | 6240 |
6156 switch (TREE_CODE (type)) | 6241 switch (TREE_CODE (type)) |
6157 { | 6242 { |
6158 case RECORD_TYPE: | 6243 case RECORD_TYPE: |
6159 case UNION_TYPE: | 6244 case UNION_TYPE: |
6164 | 6249 |
6165 /* The storage order is specified for every aggregate type. */ | 6250 /* The storage order is specified for every aggregate type. */ |
6166 reverse = TYPE_REVERSE_STORAGE_ORDER (type); | 6251 reverse = TYPE_REVERSE_STORAGE_ORDER (type); |
6167 | 6252 |
6168 /* If size is zero or the target is already cleared, do nothing. */ | 6253 /* If size is zero or the target is already cleared, do nothing. */ |
6169 if (size == 0 || cleared) | 6254 if (known_eq (size, 0) || cleared) |
6170 cleared = 1; | 6255 cleared = 1; |
6171 /* We either clear the aggregate or indicate the value is dead. */ | 6256 /* We either clear the aggregate or indicate the value is dead. */ |
6172 else if ((TREE_CODE (type) == UNION_TYPE | 6257 else if ((TREE_CODE (type) == UNION_TYPE |
6173 || TREE_CODE (type) == QUAL_UNION_TYPE) | 6258 || TREE_CODE (type) == QUAL_UNION_TYPE) |
6174 && ! CONSTRUCTOR_ELTS (exp)) | 6259 && ! CONSTRUCTOR_ELTS (exp)) |
6181 /* If we are building a static constructor into a register, | 6266 /* If we are building a static constructor into a register, |
6182 set the initial value as zero so we can fold the value into | 6267 set the initial value as zero so we can fold the value into |
6183 a constant. But if more than one register is involved, | 6268 a constant. But if more than one register is involved, |
6184 this probably loses. */ | 6269 this probably loses. */ |
6185 else if (REG_P (target) && TREE_STATIC (exp) | 6270 else if (REG_P (target) && TREE_STATIC (exp) |
6186 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) | 6271 && known_le (GET_MODE_SIZE (GET_MODE (target)), |
6272 REGMODE_NATURAL_SIZE (GET_MODE (target)))) | |
6187 { | 6273 { |
6188 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | 6274 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
6189 cleared = 1; | 6275 cleared = 1; |
6190 } | 6276 } |
6191 | 6277 |
6192 /* If the constructor has fewer fields than the structure or | 6278 /* If the constructor has fewer fields than the structure or |
6193 if we are initializing the structure to mostly zeros, clear | 6279 if we are initializing the structure to mostly zeros, clear |
6194 the whole structure first. Don't do this if TARGET is a | 6280 the whole structure first. Don't do this if TARGET is a |
6195 register whose mode size isn't equal to SIZE since | 6281 register whose mode size isn't equal to SIZE since |
6196 clear_storage can't handle this case. */ | 6282 clear_storage can't handle this case. */ |
6197 else if (size > 0 | 6283 else if (known_size_p (size) |
6198 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type)) | 6284 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type)) |
6199 || mostly_zeros_p (exp)) | 6285 || mostly_zeros_p (exp)) |
6200 && (!REG_P (target) | 6286 && (!REG_P (target) |
6201 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) | 6287 || known_eq (GET_MODE_SIZE (GET_MODE (target)), size))) |
6202 == size))) | |
6203 { | 6288 { |
6204 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); | 6289 clear_storage (target, gen_int_mode (size, Pmode), |
6290 BLOCK_OP_NORMAL); | |
6205 cleared = 1; | 6291 cleared = 1; |
6206 } | 6292 } |
6207 | 6293 |
6208 if (REG_P (target) && !cleared) | 6294 if (REG_P (target) && !cleared) |
6209 emit_clobber (target); | 6295 emit_clobber (target); |
6380 && (count < maxelt - minelt + 1 | 6466 && (count < maxelt - minelt + 1 |
6381 || 4 * zero_count >= 3 * count)) | 6467 || 4 * zero_count >= 3 * count)) |
6382 need_to_clear = 1; | 6468 need_to_clear = 1; |
6383 } | 6469 } |
6384 | 6470 |
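The clearing heuristic above (it recurs for vectors further down) fires when the constructor leaves elements out, or when zeros dominate: "4 * zero_count >= 3 * count" is the ">= 75%" test named in the later comment. As a predicate:

    /* Clear the whole object first if some elements are missing from
       the constructor, or if at least 75% of the given ones are zero.  */
    static int
    should_clear_first (unsigned long count, unsigned long total_elts,
                        unsigned long zero_count)
    {
      return count < total_elts || 4 * zero_count >= 3 * count;
    }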
6385 if (need_to_clear && size > 0) | 6471 if (need_to_clear && maybe_gt (size, 0)) |
6386 { | 6472 { |
6387 if (REG_P (target)) | 6473 if (REG_P (target)) |
6388 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | 6474 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
6389 else | 6475 else |
6390 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); | 6476 clear_storage (target, gen_int_mode (size, Pmode), |
6477 BLOCK_OP_NORMAL); | |
6391 cleared = 1; | 6478 cleared = 1; |
6392 } | 6479 } |
6393 | 6480 |
6394 if (!cleared && REG_P (target)) | 6481 if (!cleared && REG_P (target)) |
6395 /* Inform later passes that the old value is dead. */ | 6482 /* Inform later passes that the old value is dead. */ |
6399 corresponding element of TARGET, determined by counting the | 6486 corresponding element of TARGET, determined by counting the |
6400 elements. */ | 6487 elements. */ |
6401 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value) | 6488 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value) |
6402 { | 6489 { |
6403 machine_mode mode; | 6490 machine_mode mode; |
6404 HOST_WIDE_INT bitsize; | 6491 poly_int64 bitsize; |
6405 HOST_WIDE_INT bitpos; | 6492 HOST_WIDE_INT bitpos; |
6406 rtx xtarget = target; | 6493 rtx xtarget = target; |
6407 | 6494 |
6408 if (cleared && initializer_zerop (value)) | 6495 if (cleared && initializer_zerop (value)) |
6409 continue; | 6496 continue; |
6410 | 6497 |
6411 mode = TYPE_MODE (elttype); | 6498 mode = TYPE_MODE (elttype); |
6412 if (mode == BLKmode) | 6499 if (mode != BLKmode) |
6413 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype)) | |
6414 ? tree_to_uhwi (TYPE_SIZE (elttype)) | |
6415 : -1); | |
6416 else | |
6417 bitsize = GET_MODE_BITSIZE (mode); | 6500 bitsize = GET_MODE_BITSIZE (mode); |
6501 else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize)) | |
6502 bitsize = -1; | |
6418 | 6503 |
6419 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) | 6504 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
6420 { | 6505 { |
6421 tree lo_index = TREE_OPERAND (index, 0); | 6506 tree lo_index = TREE_OPERAND (index, 0); |
6422 tree hi_index = TREE_OPERAND (index, 1); | 6507 tree hi_index = TREE_OPERAND (index, 1); |
6492 xtarget = offset_address (target, pos_rtx, | 6577 xtarget = offset_address (target, pos_rtx, |
6493 highest_pow2_factor (position)); | 6578 highest_pow2_factor (position)); |
6494 xtarget = adjust_address (xtarget, mode, 0); | 6579 xtarget = adjust_address (xtarget, mode, 0); |
6495 if (TREE_CODE (value) == CONSTRUCTOR) | 6580 if (TREE_CODE (value) == CONSTRUCTOR) |
6496 store_constructor (value, xtarget, cleared, | 6581 store_constructor (value, xtarget, cleared, |
6497 bitsize / BITS_PER_UNIT, reverse); | 6582 exact_div (bitsize, BITS_PER_UNIT), |
6583 reverse); | |
6498 else | 6584 else |
6499 store_expr (value, xtarget, 0, false, reverse); | 6585 store_expr (value, xtarget, 0, false, reverse); |
6500 | 6586 |
6501 /* Generate a conditional jump to exit the loop. */ | 6587 /* Generate a conditional jump to exit the loop. */ |
6502 exit_cond = build2 (LT_EXPR, integer_type_node, | 6588 exit_cond = build2 (LT_EXPR, integer_type_node, |
6570 { | 6656 { |
6571 unsigned HOST_WIDE_INT idx; | 6657 unsigned HOST_WIDE_INT idx; |
6572 constructor_elt *ce; | 6658 constructor_elt *ce; |
6573 int i; | 6659 int i; |
6574 int need_to_clear; | 6660 int need_to_clear; |
6575 int icode = CODE_FOR_nothing; | 6661 insn_code icode = CODE_FOR_nothing; |
6662 tree elt; | |
6576 tree elttype = TREE_TYPE (type); | 6663 tree elttype = TREE_TYPE (type); |
6577 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype)); | 6664 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype)); |
6578 machine_mode eltmode = TYPE_MODE (elttype); | 6665 machine_mode eltmode = TYPE_MODE (elttype); |
6579 HOST_WIDE_INT bitsize; | 6666 HOST_WIDE_INT bitsize; |
6580 HOST_WIDE_INT bitpos; | 6667 HOST_WIDE_INT bitpos; |
6581 rtvec vector = NULL; | 6668 rtvec vector = NULL; |
6582 unsigned n_elts; | 6669 poly_uint64 n_elts; |
6670 unsigned HOST_WIDE_INT const_n_elts; | |
6583 alias_set_type alias; | 6671 alias_set_type alias; |
6584 bool vec_vec_init_p = false; | 6672 bool vec_vec_init_p = false; |
6673 machine_mode mode = GET_MODE (target); | |
6585 | 6674 |
6586 gcc_assert (eltmode != BLKmode); | 6675 gcc_assert (eltmode != BLKmode); |
6587 | 6676 |
6677 /* Try using vec_duplicate_optab for uniform vectors. */ | |
6678 if (!TREE_SIDE_EFFECTS (exp) | |
6679 && VECTOR_MODE_P (mode) | |
6680 && eltmode == GET_MODE_INNER (mode) | |
6681 && ((icode = optab_handler (vec_duplicate_optab, mode)) | |
6682 != CODE_FOR_nothing) | |
6683 && (elt = uniform_vector_p (exp))) | |
6684 { | |
6685 struct expand_operand ops[2]; | |
6686 create_output_operand (&ops[0], target, mode); | |
6687 create_input_operand (&ops[1], expand_normal (elt), eltmode); | |
6688 expand_insn (icode, 2, ops); | |
6689 if (!rtx_equal_p (target, ops[0].value)) | |
6690 emit_move_insn (target, ops[0].value); | |
6691 break; | |
6692 } | |
6693 | |
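The block added above short-circuits vector CONSTRUCTORs whose elements are all the same value: uniform_vector_p returns the repeated element, and vec_duplicate_optab then emits a single broadcast instead of per-element stores. A sketch of the uniformity test on plain values (the real function inspects trees, not ints):

    #include <cassert>
    #include <vector>

    /* Sketch of the idea behind uniform_vector_p: return the common
       element of a vector initializer if all elements are identical,
       otherwise null, so the caller can emit one broadcast.  */
    template <typename T>
    const T *uniform_element (const std::vector<T> &elts)
    {
      if (elts.empty ())
        return nullptr;
      for (const T &e : elts)
        if (!(e == elts[0]))
          return nullptr;
      return &elts[0];
    }

    int main ()
    {
      std::vector<int> splat = {7, 7, 7, 7};
      std::vector<int> mixed = {7, 7, 0, 7};
      assert (uniform_element (splat) && *uniform_element (splat) == 7);
      assert (uniform_element (mixed) == nullptr);
    }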
6588 n_elts = TYPE_VECTOR_SUBPARTS (type); | 6694 n_elts = TYPE_VECTOR_SUBPARTS (type); |
6589 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) | 6695 if (REG_P (target) |
6696 && VECTOR_MODE_P (mode) | |
6697 && n_elts.is_constant (&const_n_elts)) | |
6590 { | 6698 { |
6591 machine_mode mode = GET_MODE (target); | |
6592 machine_mode emode = eltmode; | 6699 machine_mode emode = eltmode; |
6593 | 6700 |
6594 if (CONSTRUCTOR_NELTS (exp) | 6701 if (CONSTRUCTOR_NELTS (exp) |
6595 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value)) | 6702 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value)) |
6596 == VECTOR_TYPE)) | 6703 == VECTOR_TYPE)) |
6597 { | 6704 { |
6598 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value); | 6705 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value); |
6599 gcc_assert (CONSTRUCTOR_NELTS (exp) * TYPE_VECTOR_SUBPARTS (etype) | 6706 gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp) |
6600 == n_elts); | 6707 * TYPE_VECTOR_SUBPARTS (etype), |
6708 n_elts)); | |
6601 emode = TYPE_MODE (etype); | 6709 emode = TYPE_MODE (etype); |
6602 } | 6710 } |
6603 icode = (int) convert_optab_handler (vec_init_optab, mode, emode); | 6711 icode = convert_optab_handler (vec_init_optab, mode, emode); |
6604 if (icode != CODE_FOR_nothing) | 6712 if (icode != CODE_FOR_nothing) |
6605 { | 6713 { |
6606 unsigned int i, n = n_elts; | 6714 unsigned int i, n = const_n_elts; |
6607 | 6715 |
6608 if (emode != eltmode) | 6716 if (emode != eltmode) |
6609 { | 6717 { |
6610 n = CONSTRUCTOR_NELTS (exp); | 6718 n = CONSTRUCTOR_NELTS (exp); |
6611 vec_vec_init_p = true; | 6719 vec_vec_init_p = true; |
6640 zero_count += n_elts_here; | 6748 zero_count += n_elts_here; |
6641 } | 6749 } |
6642 | 6750 |
6643 /* Clear the entire vector first if there are any missing elements, | 6751 /* Clear the entire vector first if there are any missing elements, |
6644 or if the incidence of zero elements is >= 75%. */ | 6752 or if the incidence of zero elements is >= 75%. */ |
6645 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); | 6753 need_to_clear = (maybe_lt (count, n_elts) |
6754 || 4 * zero_count >= 3 * count); | |
6646 } | 6755 } |
6647 | 6756 |
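The heuristic just above is worth a worked example: with count = 8 initialized elements of which zero_count = 6 are zero, 4 * 6 >= 3 * 8 holds, so the whole vector is cleared first and only the two nonzero elements are stored afterwards. A stand-in with constant counts (the diff's version compares a poly_int element count via maybe_lt):

    #include <cassert>

    /* Integer-only stand-in for the clearing heuristic: clear first if
       some elements are uninitialized, or if zeros make up at least 75%
       of the explicit initializers (4 * zeros >= 3 * total).  */
    bool need_to_clear_first (unsigned count, unsigned n_elts,
                              unsigned zero_count)
    {
      return count < n_elts || 4 * zero_count >= 3 * count;
    }

    int main ()
    {
      assert (need_to_clear_first (8, 8, 6));   /* 75% zeros: clear */
      assert (!need_to_clear_first (8, 8, 5));  /* 62.5%: store all */
      assert (need_to_clear_first (4, 8, 0));   /* missing elements */
    }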
6648 if (need_to_clear && size > 0 && !vector) | 6757 if (need_to_clear && maybe_gt (size, 0) && !vector) |
6649 { | 6758 { |
6650 if (REG_P (target)) | 6759 if (REG_P (target)) |
6651 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | 6760 emit_move_insn (target, CONST0_RTX (mode)); |
6652 else | 6761 else |
6653 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); | 6762 clear_storage (target, gen_int_mode (size, Pmode), |
6763 BLOCK_OP_NORMAL); | |
6654 cleared = 1; | 6764 cleared = 1; |
6655 } | 6765 } |
6656 | 6766 |
6657 /* Inform later passes that the old value is dead. */ | 6767 /* Inform later passes that the old value is dead. */ |
6658 if (!cleared && !vector && REG_P (target)) | 6768 if (!cleared && !vector && REG_P (target)) |
6659 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | 6769 emit_move_insn (target, CONST0_RTX (mode)); |
6660 | 6770 |
6661 if (MEM_P (target)) | 6771 if (MEM_P (target)) |
6662 alias = MEM_ALIAS_SET (target); | 6772 alias = MEM_ALIAS_SET (target); |
6663 else | 6773 else |
6664 alias = get_alias_set (elttype); | 6774 alias = get_alias_set (elttype); |
6705 } | 6815 } |
6706 } | 6816 } |
6707 | 6817 |
6708 if (vector) | 6818 if (vector) |
6709 emit_insn (GEN_FCN (icode) (target, | 6819 emit_insn (GEN_FCN (icode) (target, |
6710 gen_rtx_PARALLEL (GET_MODE (target), | 6820 gen_rtx_PARALLEL (mode, vector))); |
6711 vector))); | |
6712 break; | 6821 break; |
6713 } | 6822 } |
6714 | 6823 |
6715 default: | 6824 default: |
6716 gcc_unreachable (); | 6825 gcc_unreachable (); |
6737 If NONTEMPORAL is true, try generating a nontemporal store. | 6846 If NONTEMPORAL is true, try generating a nontemporal store. |
6738 | 6847 |
6739 If REVERSE is true, the store is to be done in reverse order. */ | 6848 If REVERSE is true, the store is to be done in reverse order. */ |
6740 | 6849 |
6741 static rtx | 6850 static rtx |
6742 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, | 6851 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos, |
6743 unsigned HOST_WIDE_INT bitregion_start, | 6852 poly_uint64 bitregion_start, poly_uint64 bitregion_end, |
6744 unsigned HOST_WIDE_INT bitregion_end, | |
6745 machine_mode mode, tree exp, | 6853 machine_mode mode, tree exp, |
6746 alias_set_type alias_set, bool nontemporal, bool reverse) | 6854 alias_set_type alias_set, bool nontemporal, bool reverse) |
6747 { | 6855 { |
6748 if (TREE_CODE (exp) == ERROR_MARK) | 6856 if (TREE_CODE (exp) == ERROR_MARK) |
6749 return const0_rtx; | 6857 return const0_rtx; |
6750 | 6858 |
6751 /* If we have nothing to store, do nothing unless the expression has | 6859 /* If we have nothing to store, do nothing unless the expression has |
6752 side-effects. Don't do that for zero-sized addressable lhs of | 6860 side-effects. Don't do that for zero-sized addressable lhs of |
6753 calls. */ | 6861 calls. */ |
6754 if (bitsize == 0 | 6862 if (known_eq (bitsize, 0) |
6755 && (!TREE_ADDRESSABLE (TREE_TYPE (exp)) | 6863 && (!TREE_ADDRESSABLE (TREE_TYPE (exp)) |
6756 || TREE_CODE (exp) != CALL_EXPR)) | 6864 || TREE_CODE (exp) != CALL_EXPR)) |
6757 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); | 6865 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); |
6758 | 6866 |
6759 if (GET_CODE (target) == CONCAT) | 6867 if (GET_CODE (target) == CONCAT) |
6760 { | 6868 { |
6761 /* We're storing into a struct containing a single __complex. */ | 6869 /* We're storing into a struct containing a single __complex. */ |
6762 | 6870 |
6763 gcc_assert (!bitpos); | 6871 gcc_assert (known_eq (bitpos, 0)); |
6764 return store_expr (exp, target, 0, nontemporal, reverse); | 6872 return store_expr (exp, target, 0, nontemporal, reverse); |
6765 } | 6873 } |
6766 | 6874 |
6767 /* If the structure is in a register or if the component | 6875 /* If the structure is in a register or if the component |
6768 is a bit field, we cannot use addressing to access it. | 6876 is a bit field, we cannot use addressing to access it. |
6769 Use bit-field techniques or SUBREG to store in it. */ | 6877 Use bit-field techniques or SUBREG to store in it. */ |
6770 | 6878 |
6879 poly_int64 decl_bitsize; | |
6771 if (mode == VOIDmode | 6880 if (mode == VOIDmode |
6772 || (mode != BLKmode && ! direct_store[(int) mode] | 6881 || (mode != BLKmode && ! direct_store[(int) mode] |
6773 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | 6882 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT |
6774 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | 6883 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) |
6775 || REG_P (target) | 6884 || REG_P (target) |
6776 || GET_CODE (target) == SUBREG | 6885 || GET_CODE (target) == SUBREG |
6777 /* If the field isn't aligned enough to store as an ordinary memref, | 6886 /* If the field isn't aligned enough to store as an ordinary memref, |
6778 store it as a bit field. */ | 6887 store it as a bit field. */ |
6779 || (mode != BLKmode | 6888 || (mode != BLKmode |
6780 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) | 6889 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) |
6781 || bitpos % GET_MODE_ALIGNMENT (mode)) | 6890 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode))) |
6782 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target))) | 6891 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target))) |
6783 || (bitpos % BITS_PER_UNIT != 0))) | 6892 || !multiple_p (bitpos, BITS_PER_UNIT))) |
6784 || (bitsize >= 0 && mode != BLKmode | 6893 || (known_size_p (bitsize) |
6785 && GET_MODE_BITSIZE (mode) > bitsize) | 6894 && mode != BLKmode |
6895 && maybe_gt (GET_MODE_BITSIZE (mode), bitsize)) | |
6786 /* If the RHS and field are a constant size and the size of the | 6896 /* If the RHS and field are a constant size and the size of the |
6787 RHS isn't the same size as the bitfield, we must use bitfield | 6897 RHS isn't the same size as the bitfield, we must use bitfield |
6788 operations. */ | 6898 operations. */ |
6789 || (bitsize >= 0 | 6899 || (known_size_p (bitsize) |
6790 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | 6900 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))) |
6791 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0 | 6901 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))), |
6902 bitsize) | |
6792 /* Except for initialization of full bytes from a CONSTRUCTOR, which | 6903 /* Except for initialization of full bytes from a CONSTRUCTOR, which |
6793 we will handle specially below. */ | 6904 we will handle specially below. */ |
6794 && !(TREE_CODE (exp) == CONSTRUCTOR | 6905 && !(TREE_CODE (exp) == CONSTRUCTOR |
6795 && bitsize % BITS_PER_UNIT == 0) | 6906 && multiple_p (bitsize, BITS_PER_UNIT)) |
6796 /* And except for bitwise copying of TREE_ADDRESSABLE types, | 6907 /* And except for bitwise copying of TREE_ADDRESSABLE types, |
6797 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp) | 6908 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp) |
6798 includes some extra padding. store_expr / expand_expr will in | 6909 includes some extra padding. store_expr / expand_expr will in |
6799 that case call get_inner_reference that will have the bitsize | 6910 that case call get_inner_reference that will have the bitsize |
6800 we check here and thus the block move will not clobber the | 6911 we check here and thus the block move will not clobber the |
6801 padding that shouldn't be clobbered. In the future we could | 6912 padding that shouldn't be clobbered. In the future we could |
6802 replace the TREE_ADDRESSABLE check with a check that | 6913 replace the TREE_ADDRESSABLE check with a check that |
6803 get_base_address needs to live in memory. */ | 6914 get_base_address needs to live in memory. */ |
6804 && (!TREE_ADDRESSABLE (TREE_TYPE (exp)) | 6915 && (!TREE_ADDRESSABLE (TREE_TYPE (exp)) |
6805 || TREE_CODE (exp) != COMPONENT_REF | 6916 || TREE_CODE (exp) != COMPONENT_REF |
6806 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST | 6917 || !multiple_p (bitsize, BITS_PER_UNIT) |
6807 || (bitsize % BITS_PER_UNIT != 0) | 6918 || !multiple_p (bitpos, BITS_PER_UNIT) |
6808 || (bitpos % BITS_PER_UNIT != 0) | 6919 || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)), |
6809 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize) | 6920 &decl_bitsize) |
6810 != 0))) | 6921 || maybe_ne (decl_bitsize, bitsize))) |
6811 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable | 6922 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable |
6812 decl we must use bitfield operations. */ | 6923 decl we must use bitfield operations. */ |
6813 || (bitsize >= 0 | 6924 || (known_size_p (bitsize) |
6814 && TREE_CODE (exp) == MEM_REF | 6925 && TREE_CODE (exp) == MEM_REF |
6815 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR | 6926 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR |
6816 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) | 6927 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
6817 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) | 6928 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
6818 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode)) | 6929 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode)) |
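Throughout the condition above, tests of the form bitpos % BITS_PER_UNIT == 0 become multiple_p (bitpos, BITS_PER_UNIT), sometimes with a third argument receiving the quotient. A constant-only sketch of that interface (for real poly_ints, divisibility must hold for every runtime value of the indeterminate):

    #include <cassert>
    #include <cstdint>

    /* Constant-only sketch of multiple_p: test divisibility and
       optionally return the quotient through an out-parameter.  */
    bool multiple_p (int64_t a, int64_t b, int64_t *quotient = nullptr)
    {
      if (b == 0 || a % b != 0)
        return false;
      if (quotient)
        *quotient = a / b;
      return true;
    }

    int main ()
    {
      int64_t bytepos;
      assert (multiple_p (24, 8, &bytepos) && bytepos == 3);  /* 3 bytes */
      assert (!multiple_p (25, 8));  /* not a whole number of bytes */
    }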
6828 nop_def = get_def_for_expr (exp, NOP_EXPR); | 6939 nop_def = get_def_for_expr (exp, NOP_EXPR); |
6829 if (nop_def) | 6940 if (nop_def) |
6830 { | 6941 { |
6831 tree type = TREE_TYPE (exp); | 6942 tree type = TREE_TYPE (exp); |
6832 if (INTEGRAL_TYPE_P (type) | 6943 if (INTEGRAL_TYPE_P (type) |
6833 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type)) | 6944 && maybe_ne (TYPE_PRECISION (type), |
6834 && bitsize == TYPE_PRECISION (type)) | 6945 GET_MODE_BITSIZE (TYPE_MODE (type))) |
6946 && known_eq (bitsize, TYPE_PRECISION (type))) | |
6835 { | 6947 { |
6836 tree op = gimple_assign_rhs1 (nop_def); | 6948 tree op = gimple_assign_rhs1 (nop_def); |
6837 type = TREE_TYPE (op); | 6949 type = TREE_TYPE (op); |
6838 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize) | 6950 if (INTEGRAL_TYPE_P (type) |
6951 && known_ge (TYPE_PRECISION (type), bitsize)) | |
6839 exp = op; | 6952 exp = op; |
6840 } | 6953 } |
6841 } | 6954 } |
6842 | 6955 |
6843 temp = expand_normal (exp); | 6956 temp = expand_normal (exp); |
6957 | |
6958 /* We don't support variable-sized BLKmode bitfields, since our | |
6959 handling of BLKmode is bound up with the ability to break | |
6960 things into words. */ | |
6961 gcc_assert (mode != BLKmode || bitsize.is_constant ()); | |
6844 | 6962 |
6845 /* Handle calls that return values in multiple non-contiguous locations. | 6963 /* Handle calls that return values in multiple non-contiguous locations. |
6846 The Irix 6 ABI has examples of this. */ | 6964 The Irix 6 ABI has examples of this. */ |
6847 if (GET_CODE (temp) == PARALLEL) | 6965 if (GET_CODE (temp) == PARALLEL) |
6848 { | 6966 { |
6849 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp)); | 6967 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp)); |
6850 scalar_int_mode temp_mode | 6968 machine_mode temp_mode = GET_MODE (temp); |
6851 = smallest_int_mode_for_size (size * BITS_PER_UNIT); | 6969 if (temp_mode == BLKmode || temp_mode == VOIDmode) |
6970 temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT); | |
6852 rtx temp_target = gen_reg_rtx (temp_mode); | 6971 rtx temp_target = gen_reg_rtx (temp_mode); |
6853 emit_group_store (temp_target, temp, TREE_TYPE (exp), size); | 6972 emit_group_store (temp_target, temp, TREE_TYPE (exp), size); |
6854 temp = temp_target; | 6973 temp = temp_target; |
6855 } | 6974 } |
6856 | 6975 |
6880 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp)); | 6999 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp)); |
6881 | 7000 |
6882 if (reverse) | 7001 if (reverse) |
6883 temp = flip_storage_order (temp_mode, temp); | 7002 temp = flip_storage_order (temp_mode, temp); |
6884 | 7003 |
6885 if (bitsize < size | 7004 gcc_checking_assert (known_le (bitsize, size)); |
7005 if (maybe_lt (bitsize, size) | |
6886 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN | 7006 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN |
6887 && !(mode == BLKmode && bitsize > BITS_PER_WORD)) | 7007 /* Use of to_constant for BLKmode was checked above. */ |
7008 && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD)) | |
6888 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp, | 7009 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp, |
6889 size - bitsize, NULL_RTX, 1); | 7010 size - bitsize, NULL_RTX, 1); |
6890 } | 7011 } |
6891 | 7012 |
6892 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */ | 7013 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */ |
6899 a block copy. Likewise for a BLKmode-like TARGET. */ | 7020 a block copy. Likewise for a BLKmode-like TARGET. */ |
6900 if (GET_MODE (temp) == BLKmode | 7021 if (GET_MODE (temp) == BLKmode |
6901 && (GET_MODE (target) == BLKmode | 7022 && (GET_MODE (target) == BLKmode |
6902 || (MEM_P (target) | 7023 || (MEM_P (target) |
6903 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT | 7024 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT |
6904 && (bitpos % BITS_PER_UNIT) == 0 | 7025 && multiple_p (bitpos, BITS_PER_UNIT) |
6905 && (bitsize % BITS_PER_UNIT) == 0))) | 7026 && multiple_p (bitsize, BITS_PER_UNIT)))) |
6906 { | 7027 { |
6907 gcc_assert (MEM_P (target) && MEM_P (temp) | 7028 gcc_assert (MEM_P (target) && MEM_P (temp)); |
6908 && (bitpos % BITS_PER_UNIT) == 0); | 7029 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT); |
6909 | 7030 poly_int64 bytesize = bits_to_bytes_round_up (bitsize); |
6910 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); | 7031 |
7032 target = adjust_address (target, VOIDmode, bytepos); | |
6911 emit_block_move (target, temp, | 7033 emit_block_move (target, temp, |
6912 GEN_INT ((bitsize + BITS_PER_UNIT - 1) | 7034 gen_int_mode (bytesize, Pmode), |
6913 / BITS_PER_UNIT), | |
6914 BLOCK_OP_NORMAL); | 7035 BLOCK_OP_NORMAL); |
6915 | 7036 |
6916 return const0_rtx; | 7037 return const0_rtx; |
6917 } | 7038 } |
6918 | 7039 |
6919 /* If the mode of TEMP is still BLKmode and BITSIZE is not larger than the | 7040 /* If the mode of TEMP is still BLKmode and BITSIZE is not larger than the |
6920 word size, we need to load the value (see again store_bit_field). */ | 7041 word size, we need to load the value (see again store_bit_field). */ |
6921 if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD) | 7042 if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD)) |
6922 { | 7043 { |
6923 scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize); | 7044 scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize); |
6924 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode, | 7045 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode, |
6925 temp_mode, false, NULL); | 7046 temp_mode, false, NULL); |
6926 } | 7047 } |
6927 | 7048 |
6928 /* Store the value in the bitfield. */ | 7049 /* Store the value in the bitfield. */ |
7050 gcc_checking_assert (known_ge (bitpos, 0)); | |
6929 store_bit_field (target, bitsize, bitpos, | 7051 store_bit_field (target, bitsize, bitpos, |
6930 bitregion_start, bitregion_end, | 7052 bitregion_start, bitregion_end, |
6931 mode, temp, reverse); | 7053 mode, temp, reverse); |
6932 | 7054 |
6933 return const0_rtx; | 7055 return const0_rtx; |
6934 } | 7056 } |
6935 else | 7057 else |
6936 { | 7058 { |
6937 /* Now build a reference to just the desired component. */ | 7059 /* Now build a reference to just the desired component. */ |
6938 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); | 7060 rtx to_rtx = adjust_address (target, mode, |
7061 exact_div (bitpos, BITS_PER_UNIT)); | |
6939 | 7062 |
6940 if (to_rtx == target) | 7063 if (to_rtx == target) |
6941 to_rtx = copy_rtx (to_rtx); | 7064 to_rtx = copy_rtx (to_rtx); |
6942 | 7065 |
6943 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) | 7066 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
6944 set_mem_alias_set (to_rtx, alias_set); | 7067 set_mem_alias_set (to_rtx, alias_set); |
6945 | 7068 |
6946 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR | 7069 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR |
6947 into a target smaller than its type; handle that case now. */ | 7070 into a target smaller than its type; handle that case now. */ |
6948 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0) | 7071 if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize)) |
6949 { | 7072 { |
6950 gcc_assert (bitsize % BITS_PER_UNIT == 0); | 7073 poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT); |
6951 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse); | 7074 store_constructor (exp, to_rtx, 0, bytesize, reverse); |
6952 return to_rtx; | 7075 return to_rtx; |
6953 } | 7076 } |
6954 | 7077 |
6955 return store_expr (exp, to_rtx, 0, nontemporal, reverse); | 7078 return store_expr (exp, to_rtx, 0, nontemporal, reverse); |
6956 } | 7079 } |
6977 If the field describes a variable-sized object, *PMODE is set to | 7100 If the field describes a variable-sized object, *PMODE is set to |
6978 BLKmode and *PBITSIZE is set to -1. An access cannot be made in | 7101 BLKmode and *PBITSIZE is set to -1. An access cannot be made in |
6979 this case, but the address of the object can be found. */ | 7102 this case, but the address of the object can be found. */ |
6980 | 7103 |
6981 tree | 7104 tree |
6982 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, | 7105 get_inner_reference (tree exp, poly_int64_pod *pbitsize, |
6983 HOST_WIDE_INT *pbitpos, tree *poffset, | 7106 poly_int64_pod *pbitpos, tree *poffset, |
6984 machine_mode *pmode, int *punsignedp, | 7107 machine_mode *pmode, int *punsignedp, |
6985 int *preversep, int *pvolatilep) | 7108 int *preversep, int *pvolatilep) |
6986 { | 7109 { |
6987 tree size_tree = 0; | 7110 tree size_tree = 0; |
6988 machine_mode mode = VOIDmode; | 7111 machine_mode mode = VOIDmode; |
6989 bool blkmode_bitfield = false; | 7112 bool blkmode_bitfield = false; |
6990 tree offset = size_zero_node; | 7113 tree offset = size_zero_node; |
6991 offset_int bit_offset = 0; | 7114 poly_offset_int bit_offset = 0; |
6992 | 7115 |
6993 /* First get the mode, signedness, storage order and size. We do this from | 7116 /* First get the mode, signedness, storage order and size. We do this from |
6994 just the outermost expression. */ | 7117 just the outermost expression. */ |
6995 *pbitsize = -1; | 7118 *pbitsize = -1; |
6996 if (TREE_CODE (exp) == COMPONENT_REF) | 7119 if (TREE_CODE (exp) == COMPONENT_REF) |
7004 /* Volatile bitfields should be accessed in the mode of the | 7127 /* Volatile bitfields should be accessed in the mode of the |
7005 field's type, not the mode computed based on the bit | 7128 field's type, not the mode computed based on the bit |
7006 size. */ | 7129 size. */ |
7007 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field)); | 7130 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field)); |
7008 else if (!DECL_BIT_FIELD (field)) | 7131 else if (!DECL_BIT_FIELD (field)) |
7009 mode = DECL_MODE (field); | 7132 { |
7133 mode = DECL_MODE (field); | |
7134 /* For vector fields re-check the target flags, as DECL_MODE | |
7135 could have been set with different target flags than | |
7136 the current function has. */ | |
7137 if (mode == BLKmode | |
7138 && VECTOR_TYPE_P (TREE_TYPE (field)) | |
7139 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field)))) | |
7140 mode = TYPE_MODE (TREE_TYPE (field)); | |
7141 } | |
7010 else if (DECL_MODE (field) == BLKmode) | 7142 else if (DECL_MODE (field) == BLKmode) |
7011 blkmode_bitfield = true; | 7143 blkmode_bitfield = true; |
7012 | 7144 |
7013 *punsignedp = DECL_UNSIGNED (field); | 7145 *punsignedp = DECL_UNSIGNED (field); |
7014 } | 7146 } |
7051 while (1) | 7183 while (1) |
7052 { | 7184 { |
7053 switch (TREE_CODE (exp)) | 7185 switch (TREE_CODE (exp)) |
7054 { | 7186 { |
7055 case BIT_FIELD_REF: | 7187 case BIT_FIELD_REF: |
7056 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2)); | 7188 bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2)); |
7057 break; | 7189 break; |
7058 | 7190 |
7059 case COMPONENT_REF: | 7191 case COMPONENT_REF: |
7060 { | 7192 { |
7061 tree field = TREE_OPERAND (exp, 1); | 7193 tree field = TREE_OPERAND (exp, 1); |
7066 type construction. */ | 7198 type construction. */ |
7067 if (this_offset == 0) | 7199 if (this_offset == 0) |
7068 break; | 7200 break; |
7069 | 7201 |
7070 offset = size_binop (PLUS_EXPR, offset, this_offset); | 7202 offset = size_binop (PLUS_EXPR, offset, this_offset); |
7071 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field)); | 7203 bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field)); |
7072 | 7204 |
7073 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ | 7205 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
7074 } | 7206 } |
7075 break; | 7207 break; |
7076 | 7208 |
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | 7243 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) |
7112 { | 7244 { |
7113 tree off = TREE_OPERAND (exp, 1); | 7245 tree off = TREE_OPERAND (exp, 1); |
7114 if (!integer_zerop (off)) | 7246 if (!integer_zerop (off)) |
7115 { | 7247 { |
7116 offset_int boff, coff = mem_ref_offset (exp); | 7248 poly_offset_int boff = mem_ref_offset (exp); |
7117 boff = coff << LOG2_BITS_PER_UNIT; | 7249 boff <<= LOG2_BITS_PER_UNIT; |
7118 bit_offset += boff; | 7250 bit_offset += boff; |
7119 } | 7251 } |
7120 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | 7252 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
7121 } | 7253 } |
7122 goto done; | 7254 goto done; |
7134 done: | 7266 done: |
7135 | 7267 |
7136 /* If OFFSET is constant, see if we can return the whole thing as a | 7268 /* If OFFSET is constant, see if we can return the whole thing as a |
7137 constant bit position. Make sure to handle overflow during | 7269 constant bit position. Make sure to handle overflow during |
7138 this conversion. */ | 7270 this conversion. */ |
7139 if (TREE_CODE (offset) == INTEGER_CST) | 7271 if (poly_int_tree_p (offset)) |
7140 { | 7272 { |
7141 offset_int tem = wi::sext (wi::to_offset (offset), | 7273 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset), |
7142 TYPE_PRECISION (sizetype)); | 7274 TYPE_PRECISION (sizetype)); |
7143 tem <<= LOG2_BITS_PER_UNIT; | 7275 tem <<= LOG2_BITS_PER_UNIT; |
7144 tem += bit_offset; | 7276 tem += bit_offset; |
7145 if (wi::fits_shwi_p (tem)) | 7277 if (tem.to_shwi (pbitpos)) |
7146 { | 7278 *poffset = offset = NULL_TREE; |
7147 *pbitpos = tem.to_shwi (); | |
7148 *poffset = offset = NULL_TREE; | |
7149 } | |
7150 } | 7279 } |
7151 | 7280 |
7152 /* Otherwise, split it up. */ | 7281 /* Otherwise, split it up. */ |
7153 if (offset) | 7282 if (offset) |
7154 { | 7283 { |
7155 /* Avoid returning a negative bitpos as this may wreak havoc later. */ | 7284 /* Avoid returning a negative bitpos as this may wreak havoc later. */ |
7156 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset)) | 7285 if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0)) |
7157 { | 7286 { |
7158 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false); | 7287 *pbitpos = num_trailing_bits (bit_offset.force_shwi ()); |
7159 offset_int tem = wi::bit_and_not (bit_offset, mask); | 7288 poly_offset_int bytes = bits_to_bytes_round_down (bit_offset); |
7160 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf. | |
7161 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */ | |
7162 bit_offset -= tem; | |
7163 tem >>= LOG2_BITS_PER_UNIT; | |
7164 offset = size_binop (PLUS_EXPR, offset, | 7289 offset = size_binop (PLUS_EXPR, offset, |
7165 wide_int_to_tree (sizetype, tem)); | 7290 build_int_cst (sizetype, bytes.force_shwi ())); |
7166 } | 7291 } |
7167 | 7292 |
7168 *pbitpos = bit_offset.to_shwi (); | |
7169 *poffset = offset; | 7293 *poffset = offset; |
7170 } | 7294 } |
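The rewritten overflow path above splits a possibly negative bit offset into a byte part rounded toward minus infinity plus a nonnegative remainder of trailing bits, so *pbitpos can no longer come out negative. Assuming 8-bit units and an arithmetic right shift (only guaranteed since C++20), the two helpers behave like this sketch:

    #include <cassert>
    #include <cstdint>

    /* Constant-only stand-ins for the helpers named in the diff.  The
       byte part rounds toward minus infinity (arithmetic shift assumed),
       so the bit remainder is always in [0, 7], even for negative
       offsets.  */
    int64_t bits_to_bytes_round_down (int64_t bits) { return bits >> 3; }
    int64_t num_trailing_bits (int64_t bits) { return bits & 7; }

    int main ()
    {
      int64_t bit_offset = -5;
      int64_t bytes = bits_to_bytes_round_down (bit_offset);  /* -1 */
      int64_t bits = num_trailing_bits (bit_offset);          /*  3 */
      assert (bytes == -1 && bits == 3);
      assert (bytes * 8 + bits == bit_offset);  /* exact decomposition */
    }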
7171 | 7295 |
7172 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */ | 7296 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */ |
7173 if (mode == VOIDmode | 7297 if (mode == VOIDmode |
7174 && blkmode_bitfield | 7298 && blkmode_bitfield |
7175 && (*pbitpos % BITS_PER_UNIT) == 0 | 7299 && multiple_p (*pbitpos, BITS_PER_UNIT) |
7176 && (*pbitsize % BITS_PER_UNIT) == 0) | 7300 && multiple_p (*pbitsize, BITS_PER_UNIT)) |
7177 *pmode = BLKmode; | 7301 *pmode = BLKmode; |
7178 else | 7302 else |
7179 *pmode = mode; | 7303 *pmode = mode; |
7180 | 7304 |
7181 return exp; | 7305 return exp; |
7707 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode, | 7831 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode, |
7708 enum expand_modifier modifier, addr_space_t as) | 7832 enum expand_modifier modifier, addr_space_t as) |
7709 { | 7833 { |
7710 rtx result, subtarget; | 7834 rtx result, subtarget; |
7711 tree inner, offset; | 7835 tree inner, offset; |
7712 HOST_WIDE_INT bitsize, bitpos; | 7836 poly_int64 bitsize, bitpos; |
7713 int unsignedp, reversep, volatilep = 0; | 7837 int unsignedp, reversep, volatilep = 0; |
7714 machine_mode mode1; | 7838 machine_mode mode1; |
7715 | 7839 |
7716 /* If we are taking the address of a constant and are at the top level, | 7840 /* If we are taking the address of a constant and are at the top level, |
7717 we have to use output_constant_def since we can't call force_const_mem | 7841 we have to use output_constant_def since we can't call force_const_mem |
7739 tree tem = TREE_OPERAND (exp, 0); | 7863 tree tem = TREE_OPERAND (exp, 0); |
7740 if (!integer_zerop (TREE_OPERAND (exp, 1))) | 7864 if (!integer_zerop (TREE_OPERAND (exp, 1))) |
7741 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1)); | 7865 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1)); |
7742 return expand_expr (tem, target, tmode, modifier); | 7866 return expand_expr (tem, target, tmode, modifier); |
7743 } | 7867 } |
7868 | |
7869 case TARGET_MEM_REF: | |
7870 return addr_for_mem_ref (exp, as, true); | |
7744 | 7871 |
7745 case CONST_DECL: | 7872 case CONST_DECL: |
7746 /* Expand the initializer like constants above. */ | 7873 /* Expand the initializer like constants above. */ |
7747 result = XEXP (expand_expr_constant (DECL_INITIAL (exp), | 7874 result = XEXP (expand_expr_constant (DECL_INITIAL (exp), |
7748 0, modifier), 0); | 7875 0, modifier), 0); |
7823 } | 7950 } |
7824 | 7951 |
7825 /* We must have made progress. */ | 7952 /* We must have made progress. */ |
7826 gcc_assert (inner != exp); | 7953 gcc_assert (inner != exp); |
7827 | 7954 |
7828 subtarget = offset || bitpos ? NULL_RTX : target; | 7955 subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target; |
7829 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than | 7956 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than |
7830 inner alignment, force the inner to be sufficiently aligned. */ | 7957 inner alignment, force the inner to be sufficiently aligned. */ |
7831 if (CONSTANT_CLASS_P (inner) | 7958 if (CONSTANT_CLASS_P (inner) |
7832 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp))) | 7959 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp))) |
7833 { | 7960 { |
7858 | 7985 |
7859 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | 7986 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
7860 result = simplify_gen_binary (PLUS, tmode, result, tmp); | 7987 result = simplify_gen_binary (PLUS, tmode, result, tmp); |
7861 else | 7988 else |
7862 { | 7989 { |
7863 subtarget = bitpos ? NULL_RTX : target; | 7990 subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target; |
7864 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget, | 7991 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget, |
7865 1, OPTAB_LIB_WIDEN); | 7992 1, OPTAB_LIB_WIDEN); |
7866 } | 7993 } |
7867 } | 7994 } |
7868 | 7995 |
7869 if (bitpos) | 7996 if (maybe_ne (bitpos, 0)) |
7870 { | 7997 { |
7871 /* Someone beforehand should have rejected taking the address | 7998 /* Someone beforehand should have rejected taking the address |
7872 of such an object. */ | 7999 of an object that isn't byte-aligned. */ |
7873 gcc_assert ((bitpos % BITS_PER_UNIT) == 0); | 8000 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT); |
7874 | |
7875 result = convert_memory_address_addr_space (tmode, result, as); | 8001 result = convert_memory_address_addr_space (tmode, result, as); |
7876 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT); | 8002 result = plus_constant (tmode, result, bytepos); |
7877 if (modifier < EXPAND_SUM) | 8003 if (modifier < EXPAND_SUM) |
7878 result = force_operand (result, target); | 8004 result = force_operand (result, target); |
7879 } | 8005 } |
7880 | 8006 |
7881 return result; | 8007 return result; |
8294 { | 8420 { |
8295 gcc_assert (REG_P (target) | 8421 gcc_assert (REG_P (target) |
8296 && !TYPE_REVERSE_STORAGE_ORDER (type)); | 8422 && !TYPE_REVERSE_STORAGE_ORDER (type)); |
8297 | 8423 |
8298 /* Store this field into a union of the proper type. */ | 8424 /* Store this field into a union of the proper type. */ |
8425 poly_uint64 op0_size | |
8426 = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0))); | |
8427 poly_uint64 union_size = GET_MODE_BITSIZE (mode); | |
8299 store_field (target, | 8428 store_field (target, |
8300 MIN ((int_size_in_bytes (TREE_TYPE | 8429 /* The conversion must be constructed so that |
8301 (treeop0)) | 8430 we know at compile time how many bits |
8302 * BITS_PER_UNIT), | 8431 to preserve. */ |
8303 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), | 8432 ordered_min (op0_size, union_size), |
8304 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, | 8433 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, |
8305 false, false); | 8434 false, false); |
8306 } | 8435 } |
8307 | 8436 |
8308 /* Return the entire union. */ | 8437 /* Return the entire union. */ |
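The MIN in the removed lines becomes ordered_min, which presumably returns the smaller of the two poly_int sizes but insists that they compare the same way for every runtime vector length — hence the new comment that the number of bits to preserve must be known at compile time. On the toy degree-1 model from earlier:

    #include <cassert>
    #include <cstdint>

    /* Sketch of ordered_min on a toy degree-1 polynomial c0 + c1 * X:
       only valid when one operand is <= the other for all X >= 0,
       i.e. when both coefficients compare the same way.  */
    struct toy_poly { int64_t c0, c1; };

    toy_poly ordered_min (toy_poly a, toy_poly b)
    {
      bool a_le = a.c0 <= b.c0 && a.c1 <= b.c1;
      bool b_le = b.c0 <= a.c0 && b.c1 <= a.c1;
      assert (a_le || b_le);  /* must be ordered for every X */
      return a_le ? a : b;
    }

    int main ()
    {
      toy_poly field = {32, 0}, unit = {64, 0};
      assert (ordered_min (field, unit).c0 == 32);
      /* ordered_min ({32, 0}, {0, 16}) would assert: which operand is
         smaller depends on the runtime value of X.  */
    }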
8547 expand_operands (treeop0, treeop1, | 8676 expand_operands (treeop0, treeop1, |
8548 subtarget, &op0, &op1, modifier); | 8677 subtarget, &op0, &op1, modifier); |
8549 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); | 8678 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); |
8550 | 8679 |
8551 case MINUS_EXPR: | 8680 case MINUS_EXPR: |
8681 case POINTER_DIFF_EXPR: | |
8552 do_minus: | 8682 do_minus: |
8553 /* For initializers, we are allowed to return a MINUS of two | 8683 /* For initializers, we are allowed to return a MINUS of two |
8554 symbolic constants. Here we handle all cases when both operands | 8684 symbolic constants. Here we handle all cases when both operands |
8555 are constant. */ | 8685 are constant. */ |
8556 /* Handle difference of two symbolic constants, | 8686 /* Handle difference of two symbolic constants, |
8605 && (TYPE_UNSIGNED (TREE_TYPE (treeop0)) | 8735 && (TYPE_UNSIGNED (TREE_TYPE (treeop0)) |
8606 != TYPE_UNSIGNED (TREE_TYPE (treeop1)))) | 8736 != TYPE_UNSIGNED (TREE_TYPE (treeop1)))) |
8607 { | 8737 { |
8608 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0)); | 8738 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0)); |
8609 this_optab = usmul_widen_optab; | 8739 this_optab = usmul_widen_optab; |
8610 if (find_widening_optab_handler (this_optab, mode, innermode, 0) | 8740 if (find_widening_optab_handler (this_optab, mode, innermode) |
8611 != CODE_FOR_nothing) | 8741 != CODE_FOR_nothing) |
8612 { | 8742 { |
8613 if (TYPE_UNSIGNED (TREE_TYPE (treeop0))) | 8743 if (TYPE_UNSIGNED (TREE_TYPE (treeop0))) |
8614 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, | 8744 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, |
8615 EXPAND_NORMAL); | 8745 EXPAND_NORMAL); |
8640 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; | 8770 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; |
8641 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab; | 8771 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab; |
8642 | 8772 |
8643 if (TREE_CODE (treeop0) != INTEGER_CST) | 8773 if (TREE_CODE (treeop0) != INTEGER_CST) |
8644 { | 8774 { |
8645 if (find_widening_optab_handler (this_optab, mode, innermode, 0) | 8775 if (find_widening_optab_handler (this_optab, mode, innermode) |
8646 != CODE_FOR_nothing) | 8776 != CODE_FOR_nothing) |
8647 { | 8777 { |
8648 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, | 8778 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, |
8649 EXPAND_NORMAL); | 8779 EXPAND_NORMAL); |
8650 /* op0 and op1 might still be constant, despite the above | 8780 /* op0 and op1 might still be constant, despite the above |
8662 } | 8792 } |
8663 temp = expand_widening_mult (mode, op0, op1, target, | 8793 temp = expand_widening_mult (mode, op0, op1, target, |
8664 unsignedp, this_optab); | 8794 unsignedp, this_optab); |
8665 return REDUCE_BIT_FIELD (temp); | 8795 return REDUCE_BIT_FIELD (temp); |
8666 } | 8796 } |
8667 if (find_widening_optab_handler (other_optab, mode, innermode, 0) | 8797 if (find_widening_optab_handler (other_optab, mode, innermode) |
8668 != CODE_FOR_nothing | 8798 != CODE_FOR_nothing |
8669 && innermode == word_mode) | 8799 && innermode == word_mode) |
8670 { | 8800 { |
8671 rtx htem, hipart; | 8801 rtx htem, hipart; |
8672 op0 = expand_normal (treeop0); | 8802 op0 = expand_normal (treeop0); |
8694 } | 8824 } |
8695 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0); | 8825 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0); |
8696 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1); | 8826 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1); |
8697 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL); | 8827 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL); |
8698 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); | 8828 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); |
8699 | |
8700 case FMA_EXPR: | |
8701 { | |
8702 optab opt = fma_optab; | |
8703 gimple *def0, *def2; | |
8704 | |
8705 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l} | |
8706 call. */ | |
8707 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing) | |
8708 { | |
8709 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA); | |
8710 tree call_expr; | |
8711 | |
8712 gcc_assert (fn != NULL_TREE); | |
8713 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2); | |
8714 return expand_builtin (call_expr, target, subtarget, mode, false); | |
8715 } | |
8716 | |
8717 def0 = get_def_for_expr (treeop0, NEGATE_EXPR); | |
8718 /* The multiplication is commutative - look at its 2nd operand | |
8719 if the first isn't fed by a negate. */ | |
8720 if (!def0) | |
8721 { | |
8722 def0 = get_def_for_expr (treeop1, NEGATE_EXPR); | |
8723 /* Swap operands if the 2nd operand is fed by a negate. */ | |
8724 if (def0) | |
8725 std::swap (treeop0, treeop1); | |
8726 } | |
8727 def2 = get_def_for_expr (treeop2, NEGATE_EXPR); | |
8728 | |
8729 op0 = op2 = NULL; | |
8730 | |
8731 if (def0 && def2 | |
8732 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing) | |
8733 { | |
8734 opt = fnms_optab; | |
8735 op0 = expand_normal (gimple_assign_rhs1 (def0)); | |
8736 op2 = expand_normal (gimple_assign_rhs1 (def2)); | |
8737 } | |
8738 else if (def0 | |
8739 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing) | |
8740 { | |
8741 opt = fnma_optab; | |
8742 op0 = expand_normal (gimple_assign_rhs1 (def0)); | |
8743 } | |
8744 else if (def2 | |
8745 && optab_handler (fms_optab, mode) != CODE_FOR_nothing) | |
8746 { | |
8747 opt = fms_optab; | |
8748 op2 = expand_normal (gimple_assign_rhs1 (def2)); | |
8749 } | |
8750 | |
8751 if (op0 == NULL) | |
8752 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL); | |
8753 if (op2 == NULL) | |
8754 op2 = expand_normal (treeop2); | |
8755 op1 = expand_normal (treeop1); | |
8756 | |
8757 return expand_ternary_op (TYPE_MODE (type), opt, | |
8758 op0, op1, op2, target, 0); | |
8759 } | |
8760 | 8829 |
8761 case MULT_EXPR: | 8830 case MULT_EXPR: |
8762 /* If this is a fixed-point operation, then we cannot use the code | 8831 /* If this is a fixed-point operation, then we cannot use the code |
8763 below because "expand_mult" doesn't support sat/no-sat fixed-point | 8832 below because "expand_mult" doesn't support sat/no-sat fixed-point |
8764 multiplications. */ | 8833 multiplications. */ |
8916 op0, target, 0); | 8985 op0, target, 0); |
8917 gcc_assert (temp); | 8986 gcc_assert (temp); |
8918 return REDUCE_BIT_FIELD (temp); | 8987 return REDUCE_BIT_FIELD (temp); |
8919 | 8988 |
8920 case ABS_EXPR: | 8989 case ABS_EXPR: |
8990 case ABSU_EXPR: | |
8921 op0 = expand_expr (treeop0, subtarget, | 8991 op0 = expand_expr (treeop0, subtarget, |
8922 VOIDmode, EXPAND_NORMAL); | 8992 VOIDmode, EXPAND_NORMAL); |
8923 if (modifier == EXPAND_STACK_PARM) | 8993 if (modifier == EXPAND_STACK_PARM) |
8924 target = 0; | 8994 target = 0; |
8925 | 8995 |
8927 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | 8997 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT |
8928 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); | 8998 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); |
8929 | 8999 |
8930 /* Unsigned abs is simply the operand. Testing here means we don't | 9000 /* Unsigned abs is simply the operand. Testing here means we don't |
8931 risk generating incorrect code below. */ | 9001 risk generating incorrect code below. */ |
8932 if (TYPE_UNSIGNED (type)) | 9002 if (TYPE_UNSIGNED (TREE_TYPE (treeop0))) |
8933 return op0; | 9003 return op0; |
8934 | 9004 |
8935 return expand_abs (mode, op0, target, unsignedp, | 9005 return expand_abs (mode, op0, target, unsignedp, |
8936 safe_from_p (target, treeop0, 1)); | 9006 safe_from_p (target, treeop0, 1)); |
8937 | 9007 |
9175 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode) | 9245 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode) |
9176 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode)) | 9246 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode)) |
9177 >= GET_MODE_BITSIZE (word_mode))) | 9247 >= GET_MODE_BITSIZE (word_mode))) |
9178 { | 9248 { |
9179 rtx_insn *seq, *seq_old; | 9249 rtx_insn *seq, *seq_old; |
9180 unsigned int high_off = subreg_highpart_offset (word_mode, | 9250 poly_uint64 high_off = subreg_highpart_offset (word_mode, |
9181 int_mode); | 9251 int_mode); |
9182 bool extend_unsigned | 9252 bool extend_unsigned |
9183 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def))); | 9253 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def))); |
9184 rtx low = lowpart_subreg (word_mode, op0, int_mode); | 9254 rtx low = lowpart_subreg (word_mode, op0, int_mode); |
9185 rtx dest_low = lowpart_subreg (word_mode, target, int_mode); | 9255 rtx dest_low = lowpart_subreg (word_mode, target, int_mode); |
9186 rtx dest_high = simplify_gen_subreg (word_mode, target, | 9256 rtx dest_high = simplify_gen_subreg (word_mode, target, |
9359 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1, | 9429 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1, |
9360 target, unsignedp); | 9430 target, unsignedp); |
9361 return target; | 9431 return target; |
9362 } | 9432 } |
9363 | 9433 |
9364 case REDUC_MAX_EXPR: | |
9365 case REDUC_MIN_EXPR: | |
9366 case REDUC_PLUS_EXPR: | |
9367 { | |
9368 op0 = expand_normal (treeop0); | |
9369 this_optab = optab_for_tree_code (code, type, optab_default); | |
9370 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0)); | |
9371 | |
9372 struct expand_operand ops[2]; | |
9373 enum insn_code icode = optab_handler (this_optab, vec_mode); | |
9374 | |
9375 create_output_operand (&ops[0], target, mode); | |
9376 create_input_operand (&ops[1], op0, vec_mode); | |
9377 expand_insn (icode, 2, ops); | |
9378 target = ops[0].value; | |
9379 if (GET_MODE (target) != mode) | |
9380 return gen_lowpart (tmode, target); | |
9381 return target; | |
9382 } | |
9383 | |
9384 case VEC_UNPACK_HI_EXPR: | 9434 case VEC_UNPACK_HI_EXPR: |
9385 case VEC_UNPACK_LO_EXPR: | 9435 case VEC_UNPACK_LO_EXPR: |
9436 case VEC_UNPACK_FIX_TRUNC_HI_EXPR: | |
9437 case VEC_UNPACK_FIX_TRUNC_LO_EXPR: | |
9386 { | 9438 { |
9387 op0 = expand_normal (treeop0); | 9439 op0 = expand_normal (treeop0); |
9388 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX, | 9440 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX, |
9389 target, unsignedp); | 9441 target, unsignedp); |
9390 gcc_assert (temp); | 9442 gcc_assert (temp); |
9420 case VEC_PACK_SAT_EXPR: | 9472 case VEC_PACK_SAT_EXPR: |
9421 case VEC_PACK_FIX_TRUNC_EXPR: | 9473 case VEC_PACK_FIX_TRUNC_EXPR: |
9422 mode = TYPE_MODE (TREE_TYPE (treeop0)); | 9474 mode = TYPE_MODE (TREE_TYPE (treeop0)); |
9423 goto binop; | 9475 goto binop; |
9424 | 9476 |
9477 case VEC_PACK_FLOAT_EXPR: | |
9478 mode = TYPE_MODE (TREE_TYPE (treeop0)); | |
9479 expand_operands (treeop0, treeop1, | |
9480 subtarget, &op0, &op1, EXPAND_NORMAL); | |
9481 this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0), | |
9482 optab_default); | |
9483 target = expand_binop (mode, this_optab, op0, op1, target, | |
9484 TYPE_UNSIGNED (TREE_TYPE (treeop0)), | |
9485 OPTAB_LIB_WIDEN); | |
9486 gcc_assert (target); | |
9487 return target; | |
9488 | |
9425 case VEC_PERM_EXPR: | 9489 case VEC_PERM_EXPR: |
9426 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL); | 9490 { |
9427 op2 = expand_normal (treeop2); | 9491 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL); |
9428 | 9492 vec_perm_builder sel; |
9429 /* Careful here: if the target doesn't support integral vector modes, | 9493 if (TREE_CODE (treeop2) == VECTOR_CST |
9430 a constant selection vector could wind up smooshed into a normal | 9494 && tree_to_vec_perm_builder (&sel, treeop2)) |
9431 integral constant. */ | 9495 { |
9432 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR) | 9496 machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2)); |
9433 { | 9497 temp = expand_vec_perm_const (mode, op0, op1, sel, |
9434 tree sel_type = TREE_TYPE (treeop2); | 9498 sel_mode, target); |
9435 machine_mode vmode | 9499 } |
9436 = mode_for_vector (SCALAR_TYPE_MODE (TREE_TYPE (sel_type)), | 9500 else |
9437 TYPE_VECTOR_SUBPARTS (sel_type)).require (); | 9501 { |
9438 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT); | 9502 op2 = expand_normal (treeop2); |
9439 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0); | 9503 temp = expand_vec_perm_var (mode, op0, op1, op2, target); |
9440 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR); | 9504 } |
9441 } | 9505 gcc_assert (temp); |
9442 else | 9506 return temp; |
9443 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT); | 9507 } |
9444 | |
9445 temp = expand_vec_perm (mode, op0, op1, op2, target); | |
9446 gcc_assert (temp); | |
9447 return temp; | |
9448 | 9508 |
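The VEC_PERM_EXPR rewrite above separates constant selectors (tree_to_vec_perm_builder feeding expand_vec_perm_const) from variable ones (expand_vec_perm_var); previously a single expand_vec_perm entry point juggled both, including the "smooshed" constant case. The reference semantics both expanders must implement, per tree.def, index the concatenation of the two inputs modulo 2N:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    /* Reference semantics of a two-input permute: element i of the
       result comes from the virtual concatenation op0 ++ op1, indexed
       by sel[i] modulo 2 * n.  */
    std::vector<int> vec_perm (const std::vector<int> &op0,
                               const std::vector<int> &op1,
                               const std::vector<std::size_t> &sel)
    {
      std::size_t n = op0.size ();
      std::vector<int> r (sel.size ());
      for (std::size_t i = 0; i < sel.size (); ++i)
        {
          std::size_t j = sel[i] % (2 * n);
          r[i] = j < n ? op0[j] : op1[j - n];
        }
      return r;
    }

    int main ()
    {
      std::vector<int> a = {10, 11, 12, 13}, b = {20, 21, 22, 23};
      /* Interleave-low selector {0, 4, 1, 5}.  */
      std::vector<int> r = vec_perm (a, b, {0, 4, 1, 5});
      assert ((r == std::vector<int>{10, 20, 11, 21}));
    }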
9449 case DOT_PROD_EXPR: | 9509 case DOT_PROD_EXPR: |
9450 { | 9510 { |
9451 tree oprnd0 = treeop0; | 9511 tree oprnd0 = treeop0; |
9452 tree oprnd1 = treeop1; | 9512 tree oprnd1 = treeop1; |
9548 } | 9608 } |
9549 | 9609 |
9550 case VEC_COND_EXPR: | 9610 case VEC_COND_EXPR: |
9551 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target); | 9611 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target); |
9552 return target; | 9612 return target; |
9613 | |
9614 case VEC_DUPLICATE_EXPR: | |
9615 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier); | |
9616 target = expand_vector_broadcast (mode, op0); | |
9617 gcc_assert (target); | |
9618 return target; | |
9619 | |
9620 case VEC_SERIES_EXPR: | |
9621 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier); | |
9622 return expand_vec_series_expr (mode, op0, op1, target); | |
9553 | 9623 |
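VEC_DUPLICATE_EXPR and VEC_SERIES_EXPR are new tree codes with simple reference semantics: a broadcast of one scalar into every lane, and the linear series {base, base + step, base + 2*step, ...}, which some targets (SVE's INDEX instruction, for instance) can materialize in one instruction. A sketch:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    /* Reference semantics of the two new vector tree codes.  */
    std::vector<int> vec_duplicate (int x, std::size_t n)
    {
      return std::vector<int> (n, x);  /* {x, x, ..., x} */
    }

    std::vector<int> vec_series (int base, int step, std::size_t n)
    {
      std::vector<int> r (n);
      for (std::size_t i = 0; i < n; ++i)
        r[i] = base + step * static_cast<int> (i);
      return r;
    }

    int main ()
    {
      assert ((vec_duplicate (7, 4) == std::vector<int>{7, 7, 7, 7}));
      assert ((vec_series (1, 3, 4) == std::vector<int>{1, 4, 7, 10}));
    }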
9554 case BIT_INSERT_EXPR: | 9624 case BIT_INSERT_EXPR: |
9555 { | 9625 { |
9556 unsigned bitpos = tree_to_uhwi (treeop2); | 9626 unsigned bitpos = tree_to_uhwi (treeop2); |
9557 unsigned bitsize; | 9627 unsigned bitsize; |
9948 } | 10018 } |
9949 | 10019 |
9950 case VECTOR_CST: | 10020 case VECTOR_CST: |
9951 { | 10021 { |
9952 tree tmp = NULL_TREE; | 10022 tree tmp = NULL_TREE; |
9953 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT | 10023 if (VECTOR_MODE_P (mode)) |
9954 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT | |
9955 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT | |
9956 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT | |
9957 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM | |
9958 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM) | |
9959 return const_vector_from_tree (exp); | 10024 return const_vector_from_tree (exp); |
9960 scalar_int_mode int_mode; | 10025 scalar_int_mode int_mode; |
9961 if (is_int_mode (mode, &int_mode)) | 10026 if (is_int_mode (mode, &int_mode)) |
9962 { | 10027 { |
9963 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp))) | 10028 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp))) |
9972 } | 10037 } |
9973 } | 10038 } |
9974 if (!tmp) | 10039 if (!tmp) |
9975 { | 10040 { |
9976 vec<constructor_elt, va_gc> *v; | 10041 vec<constructor_elt, va_gc> *v; |
9977 unsigned i; | 10042 /* Constructors need to be fixed-length. FIXME. */ |
9978 vec_alloc (v, VECTOR_CST_NELTS (exp)); | 10043 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant (); |
9979 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i) | 10044 vec_alloc (v, nunits); |
10045 for (unsigned int i = 0; i < nunits; ++i) | |
9980 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i)); | 10046 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i)); |
9981 tmp = build_constructor (type, v); | 10047 tmp = build_constructor (type, v); |
9982 } | 10048 } |
9983 return expand_expr (tmp, ignore ? const0_rtx : target, | 10049 return expand_expr (tmp, ignore ? const0_rtx : target, |
9984 tmode, modifier); | 10050 tmode, modifier); |
10054 && ! memory_address_addr_space_p (mode, XEXP (temp, 0), | 10120 && ! memory_address_addr_space_p (mode, XEXP (temp, 0), |
10055 MEM_ADDR_SPACE (temp))) | 10121 MEM_ADDR_SPACE (temp))) |
10056 return replace_equiv_address (temp, | 10122 return replace_equiv_address (temp, |
10057 copy_rtx (XEXP (temp, 0))); | 10123 copy_rtx (XEXP (temp, 0))); |
10058 return temp; | 10124 return temp; |
10125 | |
10126 case POLY_INT_CST: | |
10127 return immed_wide_int_const (poly_int_cst_value (exp), mode); | |
10059 | 10128 |
10060 case SAVE_EXPR: | 10129 case SAVE_EXPR: |
10061 { | 10130 { |
10062 tree val = treeop0; | 10131 tree val = treeop0; |
10063 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl, | 10132 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl, |
10151 unsigned align; | 10220 unsigned align; |
10152 /* Handle expansion of non-aliased memory with non-BLKmode. That | 10221 /* Handle expansion of non-aliased memory with non-BLKmode. That |
10153 might end up in a register. */ | 10222 might end up in a register. */ |
10154 if (mem_ref_refers_to_non_mem_p (exp)) | 10223 if (mem_ref_refers_to_non_mem_p (exp)) |
10155 { | 10224 { |
10156 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr (); | 10225 poly_int64 offset = mem_ref_offset (exp).force_shwi (); |
10157 base = TREE_OPERAND (base, 0); | 10226 base = TREE_OPERAND (base, 0); |
10158 if (offset == 0 | 10227 poly_uint64 type_size; |
10228 if (known_eq (offset, 0) | |
10159 && !reverse | 10229 && !reverse |
10160 && tree_fits_uhwi_p (TYPE_SIZE (type)) | 10230 && poly_int_tree_p (TYPE_SIZE (type), &type_size) |
10161 && (GET_MODE_BITSIZE (DECL_MODE (base)) | 10231 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size)) |
10162 == tree_to_uhwi (TYPE_SIZE (type)))) | |
10163 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base), | 10232 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base), |
10164 target, tmode, modifier); | 10233 target, tmode, modifier); |
10165 if (TYPE_MODE (type) == BLKmode) | 10234 if (TYPE_MODE (type) == BLKmode) |
10166 { | 10235 { |
10167 temp = assign_stack_temp (DECL_MODE (base), | 10236 temp = assign_stack_temp (DECL_MODE (base), |
10432 case BIT_FIELD_REF: | 10501 case BIT_FIELD_REF: |
10433 case ARRAY_RANGE_REF: | 10502 case ARRAY_RANGE_REF: |
10434 normal_inner_ref: | 10503 normal_inner_ref: |
10435 { | 10504 { |
10436 machine_mode mode1, mode2; | 10505 machine_mode mode1, mode2; |
10437 HOST_WIDE_INT bitsize, bitpos; | 10506 poly_int64 bitsize, bitpos, bytepos; |
10438 tree offset; | 10507 tree offset; |
10439 int reversep, volatilep = 0, must_force_mem; | 10508 int reversep, volatilep = 0, must_force_mem; |
10440 tree tem | 10509 tree tem |
10441 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1, | 10510 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1, |
10442 &unsignedp, &reversep, &volatilep); | 10511 &unsignedp, &reversep, &volatilep); |
10476 } | 10545 } |
10477 | 10546 |
10478 mode2 | 10547 mode2 |
10479 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0); | 10548 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0); |
10480 | 10549 |
10550 /* Make sure bitpos is not negative; it can wreak havoc later. */ |
10551 if (maybe_lt (bitpos, 0)) | |
10552 { | |
10553 gcc_checking_assert (offset == NULL_TREE); | |
10554 offset = size_int (bits_to_bytes_round_down (bitpos)); | |
10555 bitpos = num_trailing_bits (bitpos); | |
10556 } | |
10557 | |
10481 /* If we have either an offset, a BLKmode result, or a reference | 10558 /* If we have either an offset, a BLKmode result, or a reference |
10482 outside the underlying object, we must force it to memory. | 10559 outside the underlying object, we must force it to memory. |
10483 Such a case can occur in Ada if we have unchecked conversion | 10560 Such a case can occur in Ada if we have unchecked conversion |
10484 of an expression from a scalar type to an aggregate type or | 10561 of an expression from a scalar type to an aggregate type or |
10485 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were | 10562 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were |
10486 passed a partially uninitialized object or a view-conversion | 10563 passed a partially uninitialized object or a view-conversion |
10487 to a larger size. */ | 10564 to a larger size. */ |
10488 must_force_mem = (offset | 10565 must_force_mem = (offset |
10489 || mode1 == BLKmode | 10566 || mode1 == BLKmode |
10490 || bitpos + bitsize > GET_MODE_BITSIZE (mode2)); | 10567 || (mode == BLKmode |
10568 && !int_mode_for_size (bitsize, 1).exists ()) | |
10569 || maybe_gt (bitpos + bitsize, | |
10570 GET_MODE_BITSIZE (mode2))); | |
10491 | 10571 |
10492 /* Handle CONCAT first. */ | 10572 /* Handle CONCAT first. */ |
10493 if (GET_CODE (op0) == CONCAT && !must_force_mem) | 10573 if (GET_CODE (op0) == CONCAT && !must_force_mem) |
10494 { | 10574 { |
10495 if (bitpos == 0 | 10575 if (known_eq (bitpos, 0) |
10496 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)) | 10576 && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0))) |
10497 && COMPLEX_MODE_P (mode1) | 10577 && COMPLEX_MODE_P (mode1) |
10498 && COMPLEX_MODE_P (GET_MODE (op0)) | 10578 && COMPLEX_MODE_P (GET_MODE (op0)) |
10499 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1)) | 10579 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1)) |
10500 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0))))) | 10580 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0))))) |
10501 { | 10581 { |
10523 } | 10603 } |
10524 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]); | 10604 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]); |
10525 } | 10605 } |
10526 return op0; | 10606 return op0; |
10527 } | 10607 } |
10528 if (bitpos == 0 | 10608 if (known_eq (bitpos, 0) |
10529 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | 10609 && known_eq (bitsize, |
10530 && bitsize) | 10610 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))) |
10611 && maybe_ne (bitsize, 0)) | |
10531 { | 10612 { |
10532 op0 = XEXP (op0, 0); | 10613 op0 = XEXP (op0, 0); |
10533 mode2 = GET_MODE (op0); | 10614 mode2 = GET_MODE (op0); |
10534 } | 10615 } |
10535 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | 10616 else if (known_eq (bitpos, |
10536 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))) | 10617 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))) |
10537 && bitpos | 10618 && known_eq (bitsize, |
10538 && bitsize) | 10619 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))) |
10620 && maybe_ne (bitpos, 0) | |
10621 && maybe_ne (bitsize, 0)) | |
10539 { | 10622 { |
10540 op0 = XEXP (op0, 1); | 10623 op0 = XEXP (op0, 1); |
10541 bitpos = 0; | 10624 bitpos = 0; |
10542 mode2 = GET_MODE (op0); | 10625 mode2 = GET_MODE (op0); |
10543 } | 10626 } |
10588 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); | 10671 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); |
10589 } | 10672 } |
10590 | 10673 |
10591 /* See the comment in expand_assignment for the rationale. */ | 10674 /* See the comment in expand_assignment for the rationale. */ |
10592 if (mode1 != VOIDmode | 10675 if (mode1 != VOIDmode |
10593 && bitpos != 0 | 10676 && maybe_ne (bitpos, 0) |
10594 && bitsize > 0 | 10677 && maybe_gt (bitsize, 0) |
10595 && (bitpos % bitsize) == 0 | 10678 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos) |
10596 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 | 10679 && multiple_p (bitpos, bitsize) |
10680 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1)) | |
10597 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1)) | 10681 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1)) |
10598 { | 10682 { |
10599 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); | 10683 op0 = adjust_address (op0, mode1, bytepos); |
10600 bitpos = 0; | 10684 bitpos = 0; |
10601 } | 10685 } |
10602 | 10686 |
10603 op0 = offset_address (op0, offset_rtx, | 10687 op0 = offset_address (op0, offset_rtx, |
10604 highest_pow2_factor (offset)); | 10688 highest_pow2_factor (offset)); |
10605 } | 10689 } |
10606 | 10690 |
10607 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, | 10691 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, |
10608 record its alignment as BIGGEST_ALIGNMENT. */ | 10692 record its alignment as BIGGEST_ALIGNMENT. */ |
10609 if (MEM_P (op0) && bitpos == 0 && offset != 0 | 10693 if (MEM_P (op0) |
10694 && known_eq (bitpos, 0) | |
10695 && offset != 0 | |
10610 && is_aligning_offset (offset, tem)) | 10696 && is_aligning_offset (offset, tem)) |
10611 set_mem_align (op0, BIGGEST_ALIGNMENT); | 10697 set_mem_align (op0, BIGGEST_ALIGNMENT); |
10612 | 10698 |
10613 /* Don't forget about volatility even if this is a bitfield. */ | 10699 /* Don't forget about volatility even if this is a bitfield. */ |
10614 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0)) | 10700 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0)) |
10637 is narrower than the access size of the bitfield, | 10723 is narrower than the access size of the bitfield, |
10638 we need to extract bitfields from the access. */ | 10724 we need to extract bitfields from the access. */ |
10639 || (volatilep && TREE_CODE (exp) == COMPONENT_REF | 10725 || (volatilep && TREE_CODE (exp) == COMPONENT_REF |
10640 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)) | 10726 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)) |
10641 && mode1 != BLKmode | 10727 && mode1 != BLKmode |
10642 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT) | 10728 && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT)) |
10643 /* If the field isn't aligned enough to fetch as a memref, | 10729 /* If the field isn't aligned enough to fetch as a memref, |
10644 fetch it as a bit field. */ | 10730 fetch it as a bit field. */ |
10645 || (mode1 != BLKmode | 10731 || (mode1 != BLKmode |
10646 && (((MEM_P (op0) | 10732 && (((MEM_P (op0) |
10647 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1) | 10733 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1) |
10648 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0) | 10734 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1)) |
10649 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode) | 10735 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode) |
10650 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)) | 10736 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode))) |
10651 && modifier != EXPAND_MEMORY | 10737 && modifier != EXPAND_MEMORY |
10652 && ((modifier == EXPAND_CONST_ADDRESS | 10738 && ((modifier == EXPAND_CONST_ADDRESS |
10653 || modifier == EXPAND_INITIALIZER) | 10739 || modifier == EXPAND_INITIALIZER) |
10654 ? STRICT_ALIGNMENT | 10740 ? STRICT_ALIGNMENT |
10655 : targetm.slow_unaligned_access (mode1, | 10741 : targetm.slow_unaligned_access (mode1, |
10656 MEM_ALIGN (op0)))) | 10742 MEM_ALIGN (op0)))) |
10657 || (bitpos % BITS_PER_UNIT != 0))) | 10743 || !multiple_p (bitpos, BITS_PER_UNIT))) |
10658 /* If the type and the field are a constant size and the | 10744 /* If the type and the field are a constant size and the |
10659 size of the type isn't the same size as the bitfield, | 10745 size of the type isn't the same size as the bitfield, |
10660 we must use bitfield operations. */ | 10746 we must use bitfield operations. */ |
10661 || (bitsize >= 0 | 10747 || (known_size_p (bitsize) |
10662 && TYPE_SIZE (TREE_TYPE (exp)) | 10748 && TYPE_SIZE (TREE_TYPE (exp)) |
10663 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | 10749 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))) |
10664 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), | 10750 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))), |
10665 bitsize))) | 10751 bitsize))) |
10666 { | 10752 { |
10667 machine_mode ext_mode = mode; | 10753 machine_mode ext_mode = mode; |
10668 | 10754 |
10669 if (ext_mode == BLKmode | 10755 if (ext_mode == BLKmode |
10670 && ! (target != 0 && MEM_P (op0) | 10756 && ! (target != 0 && MEM_P (op0) |
10671 && MEM_P (target) | 10757 && MEM_P (target) |
10672 && bitpos % BITS_PER_UNIT == 0)) | 10758 && multiple_p (bitpos, BITS_PER_UNIT))) |
10673 ext_mode = int_mode_for_size (bitsize, 1).else_blk (); | 10759 ext_mode = int_mode_for_size (bitsize, 1).else_blk (); |
10674 | 10760 |
10675 if (ext_mode == BLKmode) | 10761 if (ext_mode == BLKmode) |
10676 { | 10762 { |
10677 if (target == 0) | 10763 if (target == 0) |
10678 target = assign_temp (type, 1, 1); | 10764 target = assign_temp (type, 1, 1); |
10679 | 10765 |
10680 /* ??? Unlike the similar test a few lines below, this one is | 10766 /* ??? Unlike the similar test a few lines below, this one is |
10681 very likely obsolete. */ | 10767 very likely obsolete. */ |
10682 if (bitsize == 0) | 10768 if (known_eq (bitsize, 0)) |
10683 return target; | 10769 return target; |
10684 | 10770 |
10685 /* In this case, BITPOS must start at a byte boundary and | 10771 /* In this case, BITPOS must start at a byte boundary and |
10686 TARGET, if specified, must be a MEM. */ | 10772 TARGET, if specified, must be a MEM. */ |
10687 gcc_assert (MEM_P (op0) | 10773 gcc_assert (MEM_P (op0) |
10688 && (!target || MEM_P (target)) | 10774 && (!target || MEM_P (target))); |
10689 && !(bitpos % BITS_PER_UNIT)); | 10775 |
10690 | 10776 bytepos = exact_div (bitpos, BITS_PER_UNIT); |
10777 poly_int64 bytesize = bits_to_bytes_round_up (bitsize); | |
10691 emit_block_move (target, | 10778 emit_block_move (target, |
10692 adjust_address (op0, VOIDmode, | 10779 adjust_address (op0, VOIDmode, bytepos), |
10693 bitpos / BITS_PER_UNIT), | 10780 gen_int_mode (bytesize, Pmode), |
10694 GEN_INT ((bitsize + BITS_PER_UNIT - 1) | |
10695 / BITS_PER_UNIT), | |
10696 (modifier == EXPAND_STACK_PARM | 10781 (modifier == EXPAND_STACK_PARM |
10697 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | 10782 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); |
10698 | 10783 |
10699 return target; | 10784 return target; |
10700 } | 10785 } |
10701 | 10786 |
10702 /* If we have nothing to extract, the result will be 0 for targets | 10787 /* If we have nothing to extract, the result will be 0 for targets |
10703 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always | 10788 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always |
10704 return 0 for the sake of consistency, as reading a zero-sized | 10789 return 0 for the sake of consistency, as reading a zero-sized |
10705 bitfield is valid in Ada and the value is fully specified. */ | 10790 bitfield is valid in Ada and the value is fully specified. */ |
10706 if (bitsize == 0) | 10791 if (known_eq (bitsize, 0)) |
10707 return const0_rtx; | 10792 return const0_rtx; |
10708 | 10793 |
10709 op0 = validize_mem (op0); | 10794 op0 = validize_mem (op0); |
10710 | 10795 |
10711 if (MEM_P (op0) && REG_P (XEXP (op0, 0))) | 10796 if (MEM_P (op0) && REG_P (XEXP (op0, 0))) |
10718 order and put back into memory order afterwards. */ | 10803 order and put back into memory order afterwards. */ |
10719 if (TREE_CODE (type) == RECORD_TYPE | 10804 if (TREE_CODE (type) == RECORD_TYPE |
10720 && GET_MODE_CLASS (ext_mode) == MODE_INT) | 10805 && GET_MODE_CLASS (ext_mode) == MODE_INT) |
10721 reversep = TYPE_REVERSE_STORAGE_ORDER (type); | 10806 reversep = TYPE_REVERSE_STORAGE_ORDER (type); |
10722 | 10807 |
10808 gcc_checking_assert (known_ge (bitpos, 0)); | |
10723 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, | 10809 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, |
10724 (modifier == EXPAND_STACK_PARM | 10810 (modifier == EXPAND_STACK_PARM |
10725 ? NULL_RTX : target), | 10811 ? NULL_RTX : target), |
10726 ext_mode, ext_mode, reversep, alt_rtl); | 10812 ext_mode, ext_mode, reversep, alt_rtl); |
10727 | 10813 |
10734 if (TREE_CODE (type) == RECORD_TYPE | 10820 if (TREE_CODE (type) == RECORD_TYPE |
10735 && is_int_mode (GET_MODE (op0), &op0_mode)) | 10821 && is_int_mode (GET_MODE (op0), &op0_mode)) |
10736 { | 10822 { |
10737 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode); | 10823 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode); |
10738 | 10824 |
10739 if (bitsize < size | 10825 gcc_checking_assert (known_le (bitsize, size)); |
10826 if (maybe_lt (bitsize, size) | |
10740 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) | 10827 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
10741 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0, | 10828 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0, |
10742 size - bitsize, op0, 1); | 10829 size - bitsize, op0, 1); |
10743 | 10830 |
10744 if (reversep) | 10831 if (reversep) |
10766 now as well. */ | 10853 now as well. */ |
10767 if (mode == BLKmode) | 10854 if (mode == BLKmode) |
10768 mode1 = BLKmode; | 10855 mode1 = BLKmode; |
10769 | 10856 |
10770 /* Get a reference to just this component. */ | 10857 /* Get a reference to just this component. */ |
10858 bytepos = bits_to_bytes_round_down (bitpos); | |
10771 if (modifier == EXPAND_CONST_ADDRESS | 10859 if (modifier == EXPAND_CONST_ADDRESS |
10772 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | 10860 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
10773 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | 10861 op0 = adjust_address_nv (op0, mode1, bytepos); |
10774 else | 10862 else |
10775 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); | 10863 op0 = adjust_address (op0, mode1, bytepos); |
10776 | 10864 |
10777 if (op0 == orig_op0) | 10865 if (op0 == orig_op0) |
10778 op0 = copy_rtx (op0); | 10866 op0 = copy_rtx (op0); |
10779 | 10867 |
10780 /* Don't set memory attributes if the base expression is | 10868 /* Don't set memory attributes if the base expression is |
10821 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp); | 10909 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp); |
10822 { | 10910 { |
10823 tree fndecl = get_callee_fndecl (exp), attr; | 10911 tree fndecl = get_callee_fndecl (exp), attr; |
10824 | 10912 |
10825 if (fndecl | 10913 if (fndecl |
10914 /* Don't diagnose the error attribute in thunks, those are | |
10915 artificially created. */ | |
10916 && !CALL_FROM_THUNK_P (exp) | |
10826 && (attr = lookup_attribute ("error", | 10917 && (attr = lookup_attribute ("error", |
10827 DECL_ATTRIBUTES (fndecl))) != NULL) | 10918 DECL_ATTRIBUTES (fndecl))) != NULL) |
10828 error ("%Kcall to %qs declared with attribute error: %s", | 10919 { |
10829 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), | 10920 const char *ident = lang_hooks.decl_printable_name (fndecl, 1); |
10830 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | 10921 error ("%Kcall to %qs declared with attribute error: %s", exp, |
10922 identifier_to_locale (ident), | |
10923 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | |
10924 } | |
10831 if (fndecl | 10925 if (fndecl |
10926 /* Don't diagnose the warning attribute in thunks, those are | |
10927 artificially created. */ | |
10928 && !CALL_FROM_THUNK_P (exp) | |
10832 && (attr = lookup_attribute ("warning", | 10929 && (attr = lookup_attribute ("warning", |
10833 DECL_ATTRIBUTES (fndecl))) != NULL) | 10930 DECL_ATTRIBUTES (fndecl))) != NULL) |
10834 warning_at (tree_nonartificial_location (exp), | 10931 { |
10835 0, "%Kcall to %qs declared with attribute warning: %s", | 10932 const char *ident = lang_hooks.decl_printable_name (fndecl, 1); |
10836 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), | 10933 warning_at (tree_nonartificial_location (exp), 0, |
10837 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | 10934 "%Kcall to %qs declared with attribute warning: %s", |
10935 exp, identifier_to_locale (ident), | |
10936 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | |
10937 } | |
10838 | 10938 |
10839 /* Check for a built-in function. */ | 10939 /* Check for a built-in function. */ |
10840 if (fndecl && DECL_BUILT_IN (fndecl)) | 10940 if (fndecl && fndecl_built_in_p (fndecl)) |
10841 { | 10941 { |
10842 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND); | 10942 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND); |
10843 if (CALL_WITH_BOUNDS_P (exp)) | 10943 return expand_builtin (exp, target, subtarget, tmode, ignore); |
10844 return expand_builtin_with_bounds (exp, target, subtarget, | |
10845 tmode, ignore); | |
10846 else | |
10847 return expand_builtin (exp, target, subtarget, tmode, ignore); | |
10848 } | 10944 } |
10849 } | 10945 } |
10850 return expand_call (exp, target, ignore); | 10946 return expand_call (exp, target, ignore); |
10851 | 10947 |
10852 case VIEW_CONVERT_EXPR: | 10948 case VIEW_CONVERT_EXPR: |
10853 op0 = NULL_RTX; | 10949 op0 = NULL_RTX; |
10854 | 10950 |
10855 /* If we are converting to BLKmode, try to avoid an intermediate | 10951 /* If we are converting to BLKmode, try to avoid an intermediate |
10856 temporary by fetching an inner memory reference. */ | 10952 temporary by fetching an inner memory reference. */ |
10857 if (mode == BLKmode | 10953 if (mode == BLKmode |
10858 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | 10954 && poly_int_tree_p (TYPE_SIZE (type)) |
10859 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode | 10955 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode |
10860 && handled_component_p (treeop0)) | 10956 && handled_component_p (treeop0)) |
10861 { | 10957 { |
10862 machine_mode mode1; | 10958 machine_mode mode1; |
10863 HOST_WIDE_INT bitsize, bitpos; | 10959 poly_int64 bitsize, bitpos, bytepos; |
10864 tree offset; | 10960 tree offset; |
10865 int unsignedp, reversep, volatilep = 0; | 10961 int unsignedp, reversep, volatilep = 0; |
10866 tree tem | 10962 tree tem |
10867 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1, | 10963 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1, |
10868 &unsignedp, &reversep, &volatilep); | 10964 &unsignedp, &reversep, &volatilep); |
10869 rtx orig_op0; | 10965 rtx orig_op0; |
10870 | 10966 |
10871 /* ??? We should work harder and deal with non-zero offsets. */ | 10967 /* ??? We should work harder and deal with non-zero offsets. */ |
10872 if (!offset | 10968 if (!offset |
10873 && (bitpos % BITS_PER_UNIT) == 0 | 10969 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos) |
10874 && !reversep | 10970 && !reversep |
10875 && bitsize >= 0 | 10971 && known_size_p (bitsize) |
10876 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0) | 10972 && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize)) |
10877 { | 10973 { |
10878 /* See the normal_inner_ref case for the rationale. */ | 10974 /* See the normal_inner_ref case for the rationale. */ |
10879 orig_op0 | 10975 orig_op0 |
10880 = expand_expr_real (tem, | 10976 = expand_expr_real (tem, |
10881 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE | 10977 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE |
10893 | 10989 |
10894 /* Get a reference to just this component. */ | 10990 /* Get a reference to just this component. */ |
10895 if (modifier == EXPAND_CONST_ADDRESS | 10991 if (modifier == EXPAND_CONST_ADDRESS |
10896 || modifier == EXPAND_SUM | 10992 || modifier == EXPAND_SUM |
10897 || modifier == EXPAND_INITIALIZER) | 10993 || modifier == EXPAND_INITIALIZER) |
10898 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT); | 10994 op0 = adjust_address_nv (op0, mode, bytepos); |
10899 else | 10995 else |
10900 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT); | 10996 op0 = adjust_address (op0, mode, bytepos); |
10901 | 10997 |
10902 if (op0 == orig_op0) | 10998 if (op0 == orig_op0) |
10903 op0 = copy_rtx (op0); | 10999 op0 = copy_rtx (op0); |
10904 | 11000 |
10905 set_mem_attributes (op0, treeop0, 0); | 11001 set_mem_attributes (op0, treeop0, 0); |
10918 /* If the input and output modes are both the same, we are done. */ | 11014 /* If the input and output modes are both the same, we are done. */ |
10919 if (mode == GET_MODE (op0)) | 11015 if (mode == GET_MODE (op0)) |
10920 ; | 11016 ; |
10921 /* If neither mode is BLKmode, and both modes are the same size | 11017 /* If neither mode is BLKmode, and both modes are the same size |
10922 then we can use gen_lowpart. */ | 11018 then we can use gen_lowpart. */ |
10923 else if (mode != BLKmode && GET_MODE (op0) != BLKmode | 11019 else if (mode != BLKmode |
10924 && (GET_MODE_PRECISION (mode) | 11020 && GET_MODE (op0) != BLKmode |
10925 == GET_MODE_PRECISION (GET_MODE (op0))) | 11021 && known_eq (GET_MODE_PRECISION (mode), |
11022 GET_MODE_PRECISION (GET_MODE (op0))) | |
10926 && !COMPLEX_MODE_P (GET_MODE (op0))) | 11023 && !COMPLEX_MODE_P (GET_MODE (op0))) |
10927 { | 11024 { |
10928 if (GET_CODE (op0) == SUBREG) | 11025 if (GET_CODE (op0) == SUBREG) |
10929 op0 = force_reg (GET_MODE (op0), op0); | 11026 op0 = force_reg (GET_MODE (op0), op0); |
10930 temp = gen_lowpart_common (mode, op0); | 11027 temp = gen_lowpart_common (mode, op0); |
11000 emit_insn (insn); | 11097 emit_insn (insn); |
11001 return reg; | 11098 return reg; |
11002 } | 11099 } |
11003 else if (STRICT_ALIGNMENT) | 11100 else if (STRICT_ALIGNMENT) |
11004 { | 11101 { |
11005 tree inner_type = TREE_TYPE (treeop0); | 11102 poly_uint64 mode_size = GET_MODE_SIZE (mode); |
11006 HOST_WIDE_INT temp_size | 11103 poly_uint64 temp_size = mode_size; |
11007 = MAX (int_size_in_bytes (inner_type), | 11104 if (GET_MODE (op0) != BLKmode) |
11008 (HOST_WIDE_INT) GET_MODE_SIZE (mode)); | 11105 temp_size = upper_bound (temp_size, |
11106 GET_MODE_SIZE (GET_MODE (op0))); | |
11009 rtx new_rtx | 11107 rtx new_rtx |
11010 = assign_stack_temp_for_type (mode, temp_size, type); | 11108 = assign_stack_temp_for_type (mode, temp_size, type); |
11011 rtx new_with_op0_mode | 11109 rtx new_with_op0_mode |
11012 = adjust_address (new_rtx, GET_MODE (op0), 0); | 11110 = adjust_address (new_rtx, GET_MODE (op0), 0); |
11013 | 11111 |
11014 gcc_assert (!TREE_ADDRESSABLE (exp)); | 11112 gcc_assert (!TREE_ADDRESSABLE (exp)); |
11015 | 11113 |
11016 if (GET_MODE (op0) == BLKmode) | 11114 if (GET_MODE (op0) == BLKmode) |
11017 emit_block_move (new_with_op0_mode, op0, | 11115 { |
11018 GEN_INT (GET_MODE_SIZE (mode)), | 11116 rtx size_rtx = gen_int_mode (mode_size, Pmode); |
11019 (modifier == EXPAND_STACK_PARM | 11117 emit_block_move (new_with_op0_mode, op0, size_rtx, |
11020 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | 11118 (modifier == EXPAND_STACK_PARM |
11119 ? BLOCK_OP_CALL_PARM | |
11120 : BLOCK_OP_NORMAL)); | |
11121 } | |
11021 else | 11122 else |
11022 emit_move_insn (new_with_op0_mode, op0); | 11123 emit_move_insn (new_with_op0_mode, op0); |
11023 | 11124 |
11024 op0 = new_rtx; | 11125 op0 = new_rtx; |
11025 } | 11126 } |
11052 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) | 11153 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
11053 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | 11154 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) |
11054 { | 11155 { |
11055 rtx_code_label *label = gen_label_rtx (); | 11156 rtx_code_label *label = gen_label_rtx (); |
11056 int value = TREE_CODE (rhs) == BIT_IOR_EXPR; | 11157 int value = TREE_CODE (rhs) == BIT_IOR_EXPR; |
11057 do_jump (TREE_OPERAND (rhs, 1), | 11158 profile_probability prob = profile_probability::uninitialized (); |
11058 value ? label : 0, | 11159 if (value) |
11059 value ? 0 : label, | 11160 jumpifnot (TREE_OPERAND (rhs, 1), label, prob); |
11060 profile_probability::uninitialized ()); | 11161 else |
11162 jumpif (TREE_OPERAND (rhs, 1), label, prob); | |
11061 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value), | 11163 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value), |
11062 false); | 11164 false); |
11063 do_pending_stack_adjust (); | 11165 do_pending_stack_adjust (); |
11064 emit_label (label); | 11166 emit_label (label); |
11065 return const0_rtx; | 11167 return const0_rtx; |
11138 { | 11240 { |
11139 HOST_WIDE_INT prec = TYPE_PRECISION (type); | 11241 HOST_WIDE_INT prec = TYPE_PRECISION (type); |
11140 if (target && GET_MODE (target) != GET_MODE (exp)) | 11242 if (target && GET_MODE (target) != GET_MODE (exp)) |
11141 target = 0; | 11243 target = 0; |
11142 /* For constant values, reduce using build_int_cst_type. */ | 11244 /* For constant values, reduce using build_int_cst_type. */ |
11143 if (CONST_INT_P (exp)) | 11245 poly_int64 const_exp; |
11144 { | 11246 if (poly_int_rtx_p (exp, &const_exp)) |
11145 HOST_WIDE_INT value = INTVAL (exp); | 11247 { |
11146 tree t = build_int_cst_type (type, value); | 11248 tree t = build_int_cst_type (type, const_exp); |
11147 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL); | 11249 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL); |
11148 } | 11250 } |
11149 else if (TYPE_UNSIGNED (type)) | 11251 else if (TYPE_UNSIGNED (type)) |
11150 { | 11252 { |
11151 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp)); | 11253 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp)); |
11198 /* This must now be the address of EXP. */ | 11300 /* This must now be the address of EXP. */ |
11199 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; | 11301 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; |
11200 } | 11302 } |
11201 | 11303 |
11202 /* Return the tree node if an ARG corresponds to a string constant or zero | 11304 /* Return the tree node if an ARG corresponds to a string constant or zero |
11203 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset | 11305 if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly |
11204 in bytes within the string that ARG is accessing. The type of the | 11306 non-constant) offset in bytes within the string that ARG is accessing. |
11205 offset will be `sizetype'. */ | 11307 If MEM_SIZE is non-zero the storage size of the memory is returned. |
11308 If DECL is non-zero the constant declaration is returned if available. */ | |
11206 | 11309 |
11207 tree | 11310 tree |
11208 string_constant (tree arg, tree *ptr_offset) | 11311 string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl) |
11209 { | 11312 { |
11210 tree array, offset, lower_bound; | 11313 tree array; |
11211 STRIP_NOPS (arg); | 11314 STRIP_NOPS (arg); |
11212 | 11315 |
11316 /* Non-constant index into the character array in an ARRAY_REF | |
11317 expression or null. */ | |
11318 tree varidx = NULL_TREE; | |
11319 | |
11320 poly_int64 base_off = 0; | |
11321 | |
11213 if (TREE_CODE (arg) == ADDR_EXPR) | 11322 if (TREE_CODE (arg) == ADDR_EXPR) |
11214 { | 11323 { |
11215 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) | 11324 arg = TREE_OPERAND (arg, 0); |
11216 { | 11325 tree ref = arg; |
11217 *ptr_offset = size_zero_node; | 11326 if (TREE_CODE (arg) == ARRAY_REF) |
11218 return TREE_OPERAND (arg, 0); | 11327 { |
11219 } | 11328 tree idx = TREE_OPERAND (arg, 1); |
11220 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL) | 11329 if (TREE_CODE (idx) != INTEGER_CST) |
11221 { | |
11222 array = TREE_OPERAND (arg, 0); | |
11223 offset = size_zero_node; | |
11224 } | |
11225 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) | |
11226 { | |
11227 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | |
11228 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); | |
11229 if (TREE_CODE (array) != STRING_CST && !VAR_P (array)) | |
11230 return 0; | |
11231 | |
11232 /* Check if the array has a nonzero lower bound. */ | |
11233 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0)); | |
11234 if (!integer_zerop (lower_bound)) | |
11235 { | 11330 { |
11236 /* If the offset and base aren't both constants, return 0. */ | 11331 /* From a pointer (but not array) argument extract the variable |
11237 if (TREE_CODE (lower_bound) != INTEGER_CST) | 11332 index to prevent get_addr_base_and_unit_offset() from failing |
11238 return 0; | 11333 due to it. Use it later to compute the non-constant offset |
11239 if (TREE_CODE (offset) != INTEGER_CST) | 11334 into the string and return it to the caller. */ |
11240 return 0; | 11335 varidx = idx; |
11241 /* Adjust offset by the lower bound. */ | 11336 ref = TREE_OPERAND (arg, 0); |
11242 offset = size_diffop (fold_convert (sizetype, offset), | 11337 |
11243 fold_convert (sizetype, lower_bound)); | 11338 if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE) |
11339 return NULL_TREE; | |
11340 | |
11341 if (!integer_zerop (array_ref_low_bound (arg))) | |
11342 return NULL_TREE; | |
11343 | |
11344 if (!integer_onep (array_ref_element_size (arg))) | |
11345 return NULL_TREE; | |
11244 } | 11346 } |
11245 } | 11347 } |
11246 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF) | 11348 array = get_addr_base_and_unit_offset (ref, &base_off); |
11247 { | 11349 if (!array |
11248 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | 11350 || (TREE_CODE (array) != VAR_DECL |
11249 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); | 11351 && TREE_CODE (array) != CONST_DECL |
11250 if (TREE_CODE (array) != ADDR_EXPR) | 11352 && TREE_CODE (array) != STRING_CST)) |
11251 return 0; | 11353 return NULL_TREE; |
11252 array = TREE_OPERAND (array, 0); | |
11253 if (TREE_CODE (array) != STRING_CST && !VAR_P (array)) | |
11254 return 0; | |
11255 } | |
11256 else | |
11257 return 0; | |
11258 } | 11354 } |
11259 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR) | 11355 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR) |
11260 { | 11356 { |
11261 tree arg0 = TREE_OPERAND (arg, 0); | 11357 tree arg0 = TREE_OPERAND (arg, 0); |
11262 tree arg1 = TREE_OPERAND (arg, 1); | 11358 tree arg1 = TREE_OPERAND (arg, 1); |
11263 | 11359 |
11264 STRIP_NOPS (arg0); | 11360 STRIP_NOPS (arg0); |
11265 STRIP_NOPS (arg1); | 11361 STRIP_NOPS (arg1); |
11266 | 11362 |
11267 if (TREE_CODE (arg0) == ADDR_EXPR | 11363 if (TREE_CODE (arg0) == ADDR_EXPR) |
11268 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST | 11364 ; /* Do nothing. */ |
11269 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL)) | 11365 else if (TREE_CODE (arg1) == ADDR_EXPR) |
11270 { | 11366 std::swap (arg0, arg1); |
11271 array = TREE_OPERAND (arg0, 0); | |
11272 offset = arg1; | |
11273 } | |
11274 else if (TREE_CODE (arg1) == ADDR_EXPR | |
11275 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST | |
11276 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL)) | |
11277 { | |
11278 array = TREE_OPERAND (arg1, 0); | |
11279 offset = arg0; | |
11280 } | |
11281 else | 11367 else |
11282 return 0; | 11368 return NULL_TREE; |
11283 } | 11369 |
11370 tree offset; | |
11371 if (tree str = string_constant (arg0, &offset, mem_size, decl)) | |
11372 { | |
11373 /* Avoid pointers to arrays (see bug 86622). */ | |
11374 if (POINTER_TYPE_P (TREE_TYPE (arg)) | |
11375 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE | |
11376 && !(decl && !*decl) | |
11377 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl)) | |
11378 && mem_size && tree_fits_uhwi_p (*mem_size) | |
11379 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl)))) | |
11380 return NULL_TREE; | |
11381 | |
11382 tree type = TREE_TYPE (arg1); | |
11383 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1); | |
11384 return str; | |
11385 } | |
11386 return NULL_TREE; | |
11387 } | |
11388 else if (TREE_CODE (arg) == SSA_NAME) | |
11389 { | |
11390 gimple *stmt = SSA_NAME_DEF_STMT (arg); | |
11391 if (!is_gimple_assign (stmt)) | |
11392 return NULL_TREE; | |
11393 | |
11394 tree rhs1 = gimple_assign_rhs1 (stmt); | |
11395 tree_code code = gimple_assign_rhs_code (stmt); | |
11396 if (code == ADDR_EXPR) | |
11397 return string_constant (rhs1, ptr_offset, mem_size, decl); | |
11398 else if (code != POINTER_PLUS_EXPR) | |
11399 return NULL_TREE; | |
11400 | |
11401 tree offset; | |
11402 if (tree str = string_constant (rhs1, &offset, mem_size, decl)) | |
11403 { | |
11404 /* Avoid pointers to arrays (see bug 86622). */ | |
11405 if (POINTER_TYPE_P (TREE_TYPE (rhs1)) | |
11406 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE | |
11407 && !(decl && !*decl) | |
11408 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl)) | |
11409 && mem_size && tree_fits_uhwi_p (*mem_size) | |
11410 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl)))) | |
11411 return NULL_TREE; | |
11412 | |
11413 tree rhs2 = gimple_assign_rhs2 (stmt); | |
11414 tree type = TREE_TYPE (rhs2); | |
11415 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2); | |
11416 return str; | |
11417 } | |
11418 return NULL_TREE; | |
11419 } | |
11420 else if (DECL_P (arg)) | |
11421 array = arg; | |
11284 else | 11422 else |
11285 return 0; | 11423 return NULL_TREE; |
11424 | |
11425 tree offset = wide_int_to_tree (sizetype, base_off); | |
11426 if (varidx) | |
11427 { | |
11428 if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE) | |
11429 return NULL_TREE; | |
11430 | |
11431 gcc_assert (TREE_CODE (arg) == ARRAY_REF); | |
11432 tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0))); | |
11433 if (TREE_CODE (chartype) != INTEGER_TYPE) | |
11434 return NULL_TREE; | |
11435 | |
11436 offset = fold_convert (sizetype, varidx); | |
11437 } | |
11286 | 11438 |
11287 if (TREE_CODE (array) == STRING_CST) | 11439 if (TREE_CODE (array) == STRING_CST) |
11288 { | 11440 { |
11289 *ptr_offset = fold_convert (sizetype, offset); | 11441 *ptr_offset = fold_convert (sizetype, offset); |
11442 if (mem_size) | |
11443 *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array)); | |
11444 if (decl) | |
11445 *decl = NULL_TREE; | |
11446 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array))) | |
11447 >= TREE_STRING_LENGTH (array)); | |
11290 return array; | 11448 return array; |
11291 } | 11449 } |
11292 else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL) | 11450 |
11293 { | 11451 if (!VAR_P (array) && TREE_CODE (array) != CONST_DECL) |
11294 int length; | 11452 return NULL_TREE; |
11295 tree init = ctor_for_folding (array); | 11453 |
11296 | 11454 tree init = ctor_for_folding (array); |
11297 /* Variables initialized to string literals can be handled too. */ | 11455 |
11298 if (init == error_mark_node | 11456 /* Handle variables initialized with string literals. */ |
11299 || !init | 11457 if (!init || init == error_mark_node) |
11300 || TREE_CODE (init) != STRING_CST) | 11458 return NULL_TREE; |
11301 return 0; | 11459 if (TREE_CODE (init) == CONSTRUCTOR) |
11302 | 11460 { |
11303 /* Avoid const char foo[4] = "abcde"; */ | 11461 /* Convert the 64-bit constant offset to a wider type to avoid |
11304 if (DECL_SIZE_UNIT (array) == NULL_TREE | 11462 overflow. */ |
11305 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST | 11463 offset_int wioff; |
11306 || (length = TREE_STRING_LENGTH (init)) <= 0 | 11464 if (!base_off.is_constant (&wioff)) |
11307 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0) | 11465 return NULL_TREE; |
11308 return 0; | 11466 |
11309 | 11467 wioff *= BITS_PER_UNIT; |
11310 /* If variable is bigger than the string literal, OFFSET must be constant | 11468 if (!wi::fits_uhwi_p (wioff)) |
11311 and inside of the bounds of the string literal. */ | 11469 return NULL_TREE; |
11312 offset = fold_convert (sizetype, offset); | 11470 |
11313 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0 | 11471 base_off = wioff.to_uhwi (); |
11314 && (! tree_fits_uhwi_p (offset) | 11472 unsigned HOST_WIDE_INT fieldoff = 0; |
11315 || compare_tree_int (offset, length) >= 0)) | 11473 init = fold_ctor_reference (NULL_TREE, init, base_off, 0, array, |
11316 return 0; | 11474 &fieldoff); |
11317 | 11475 HOST_WIDE_INT cstoff; |
11318 *ptr_offset = offset; | 11476 if (!base_off.is_constant (&cstoff)) |
11319 return init; | 11477 return NULL_TREE; |
11320 } | 11478 |
11321 | 11479 cstoff = (cstoff - fieldoff) / BITS_PER_UNIT; |
11322 return 0; | 11480 tree off = build_int_cst (sizetype, cstoff); |
11481 if (varidx) | |
11482 offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off); | |
11483 else | |
11484 offset = off; | |
11485 } | |
11486 | |
11487 if (!init || TREE_CODE (init) != STRING_CST) | |
11488 return NULL_TREE; | |
11489 | |
11490 if (mem_size) | |
11491 *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (init)); | |
11492 if (decl) | |
11493 *decl = array; | |
11494 | |
11495 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (init))) | |
11496 >= TREE_STRING_LENGTH (init)); | |
11497 | |
11498 *ptr_offset = offset; | |
11499 return init; | |
11500 } | |
11501 | |
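To make the rewritten string_constant concrete, here is a hedged sketch of the source-level forms the new code is written to see through; all names below are illustrative and not part of expr.c:

#include <stddef.h>

static const char a[] = "hello";

void
forms (int i, const char **out)
{
  out[0] = "hello" + 1; /* ADDR_EXPR of a STRING_CST plus a constant:
                           string "hello", offset 1.  */
  out[1] = &a[2];       /* VAR_DECL whose initializer is a STRING_CST:
                           handled via ctor_for_folding above.  */
  out[2] = &a[i];       /* ARRAY_REF with a non-constant index: the index
                           is returned through *PTR_OFFSET.  */
}
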
11502 /* Compute the modular multiplicative inverse of A modulo B | |
11503 using extended Euclid's algorithm. Assumes A and B are coprime. */ | |
11504 static wide_int | |
11505 mod_inv (const wide_int &a, const wide_int &b) | |
11506 { | |
11507 /* Verify the assumption. */ | |
11508 gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1)); | |
11509 | |
11510 unsigned int p = a.get_precision () + 1; | |
11511 gcc_checking_assert (b.get_precision () + 1 == p); | |
11512 wide_int c = wide_int::from (a, p, UNSIGNED); | |
11513 wide_int d = wide_int::from (b, p, UNSIGNED); | |
11514 wide_int x0 = wide_int::from (0, p, UNSIGNED); | |
11515 wide_int x1 = wide_int::from (1, p, UNSIGNED); | |
11516 | |
11517 if (wi::eq_p (b, 1)) | |
11518 return wide_int::from (1, p, UNSIGNED); | |
11519 | |
11520 while (wi::gt_p (c, 1, UNSIGNED)) | |
11521 { | |
11522 wide_int t = d; | |
11523 wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d); | |
11524 c = t; | |
11525 wide_int s = x0; | |
11526 x0 = wi::sub (x1, wi::mul (q, x0)); | |
11527 x1 = s; | |
11528 } | |
11529 if (wi::lt_p (x1, 0, SIGNED)) | |
11530 x1 += d; | |
11531 return x1; | |
11532 } | |
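
For intuition, a minimal stand-alone sketch of the same extended-Euclid computation over ordinary 64-bit integers (illustrative code, not part of expr.c; GCC's version runs on wide_int at prec+1 bits):

#include <assert.h>
#include <stdint.h>

/* Inverse of A modulo M; assumes gcd (A, M) == 1 and M > 1.  */
static int64_t
modular_inverse (int64_t a, int64_t m)
{
  int64_t r0 = a % m, r1 = m;   /* remainder sequence */
  int64_t x0 = 1, x1 = 0;       /* Bezout coefficients of A */
  while (r1 != 0)
    {
      int64_t q = r0 / r1;
      int64_t r2 = r0 - q * r1;  r0 = r1;  r1 = r2;
      int64_t x2 = x0 - q * x1;  x0 = x1;  x1 = x2;
    }
  assert (r0 == 1);             /* the inputs were coprime */
  return x0 < 0 ? x0 + m : x0;
}

int
main (void)
{
  /* 3 * 11 == 33 == 2 * 16 + 1, so 11 is the inverse of 3 mod 2^4.  */
  assert (modular_inverse (3, 16) == 11);
  return 0;
}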
11533 | |
11534 /* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2 | |
11535 is non-zero. With C3 = ((1<<(prec-1)) | (C1 - 1)), transform: | |
11536 for C2 > 0 to x & C3 == C2 | |
11537 for C2 < 0 to x & C3 == (C2 & C3). */ | |
11538 enum tree_code | |
11539 maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1) | |
11540 { | |
11541 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR); | |
11542 tree treeop0 = gimple_assign_rhs1 (stmt); | |
11543 tree treeop1 = gimple_assign_rhs2 (stmt); | |
11544 tree type = TREE_TYPE (*arg0); | |
11545 scalar_int_mode mode; | |
11546 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode)) | |
11547 return code; | |
11548 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type) | |
11549 || TYPE_PRECISION (type) <= 1 | |
11550 || TYPE_UNSIGNED (type) | |
11551 /* Signed x % c == 0 should have been optimized into unsigned modulo | |
11552 earlier. */ | |
11553 || integer_zerop (*arg1) | |
11554 /* If c is known to be non-negative, modulo will be expanded as unsigned | |
11555 modulo. */ | |
11556 || get_range_pos_neg (treeop0) == 1) | |
11557 return code; | |
11558 | |
11559 /* x % c == d where d < 0 && d <= -c should be always false. */ | |
11560 if (tree_int_cst_sgn (*arg1) == -1 | |
11561 && -wi::to_widest (treeop1) >= wi::to_widest (*arg1)) | |
11562 return code; | |
11563 | |
11564 int prec = TYPE_PRECISION (type); | |
11565 wide_int w = wi::to_wide (treeop1) - 1; | |
11566 w |= wi::shifted_mask (0, prec - 1, true, prec); | |
11567 tree c3 = wide_int_to_tree (type, w); | |
11568 tree c4 = *arg1; | |
11569 if (tree_int_cst_sgn (*arg1) == -1) | |
11570 c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1)); | |
11571 | |
11572 rtx op0 = expand_normal (treeop0); | |
11573 treeop0 = make_tree (TREE_TYPE (treeop0), op0); | |
11574 | |
11575 bool speed_p = optimize_insn_for_speed_p (); | |
11576 | |
11577 do_pending_stack_adjust (); | |
11578 | |
11579 location_t loc = gimple_location (stmt); | |
11580 struct separate_ops ops; | |
11581 ops.code = TRUNC_MOD_EXPR; | |
11582 ops.location = loc; | |
11583 ops.type = TREE_TYPE (treeop0); | |
11584 ops.op0 = treeop0; | |
11585 ops.op1 = treeop1; | |
11586 ops.op2 = NULL_TREE; | |
11587 start_sequence (); | |
11588 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type), | |
11589 EXPAND_NORMAL); | |
11590 rtx_insn *moinsns = get_insns (); | |
11591 end_sequence (); | |
11592 | |
11593 unsigned mocost = seq_cost (moinsns, speed_p); | |
11594 mocost += rtx_cost (mor, mode, EQ, 0, speed_p); | |
11595 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p); | |
11596 | |
11597 ops.code = BIT_AND_EXPR; | |
11598 ops.location = loc; | |
11599 ops.type = TREE_TYPE (treeop0); | |
11600 ops.op0 = treeop0; | |
11601 ops.op1 = c3; | |
11602 ops.op2 = NULL_TREE; | |
11603 start_sequence (); | |
11604 rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type), | |
11605 EXPAND_NORMAL); | |
11606 rtx_insn *muinsns = get_insns (); | |
11607 end_sequence (); | |
11608 | |
11609 unsigned mucost = seq_cost (muinsns, speed_p); | |
11610 mucost += rtx_cost (mur, mode, EQ, 0, speed_p); | |
11611 mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p); | |
11612 | |
11613 if (mocost <= mucost) | |
11614 { | |
11615 emit_insn (moinsns); | |
11616 *arg0 = make_tree (TREE_TYPE (*arg0), mor); | |
11617 return code; | |
11618 } | |
11619 | |
11620 emit_insn (muinsns); | |
11621 *arg0 = make_tree (TREE_TYPE (*arg0), mur); | |
11622 *arg1 = c4; | |
11623 return code; | |
11624 } | |
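
A quick worked check of the power-of-two transform above, as an illustrative stand-alone test rather than GCC code: with prec == 32, C1 == 8 and C3 == ((1 << 31) | 7), the rewritten mask-and-compare must agree with the signed modulo for both signs of C2.

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t c3 = (UINT32_C (1) << 31) | 7;
  for (int64_t i = -1000; i <= 1000; i++)
    {
      int32_t x = (int32_t) i;
      /* C2 == 3 > 0:  x % C1 == C2  becomes  (x & C3) == C2.  */
      assert ((x % 8 == 3) == (((uint32_t) x & c3) == 3));
      /* C2 == -3 < 0: x % C1 == C2  becomes  (x & C3) == (C2 & C3).  */
      assert ((x % 8 == -3)
              == (((uint32_t) x & c3) == ((uint32_t) -3 & c3)));
    }
  return 0;
}

The sign bit folded into C3 is what separates the two cases: non-negative remainders leave it clear, negative ones set it.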
11625 | |
11626 /* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2). | |
11627 If C1 is odd to: | |
11628 (X - C2) * C3 <= C4 (or >), where | |
11629 C3 is modular multiplicative inverse of C1 and 1<<prec and | |
11630 C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter | |
11631 if C2 > ((1<<prec) - 1) % C1). | |
11632 If C1 is even, S = ctz (C1) and C2 is 0, use | |
11633 ((X * C3) r>> S) <= C4, where C3 is modular multiplicative | |
11634 inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S. | |
11635 | |
11636 For signed (X % C1) == 0 if C1 is odd to (all operations in it | |
11637 unsigned): | |
11638 (X * C3) + C4 <= 2 * C4, where | |
11639 C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and | |
11640 C4 is ((1<<(prec - 1) - 1) / C1). | |
11641 If C1 is even, S = ctz(C1), use | |
11642 ((X * C3) + C4) r>> S <= (C4 >> (S - 1)) | |
11643 where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec | |
11644 and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S). | |
11645 | |
11646 See the Hacker's Delight book, section 10-17. */ | |
11647 enum tree_code | |
11648 maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1) | |
11649 { | |
11650 gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR); | |
11651 gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST); | |
11652 | |
11653 if (optimize < 2) | |
11654 return code; | |
11655 | |
11656 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR); | |
11657 if (stmt == NULL) | |
11658 return code; | |
11659 | |
11660 tree treeop0 = gimple_assign_rhs1 (stmt); | |
11661 tree treeop1 = gimple_assign_rhs2 (stmt); | |
11662 if (TREE_CODE (treeop0) != SSA_NAME | |
11663 || TREE_CODE (treeop1) != INTEGER_CST | |
11664 /* Don't optimize the undefined behavior case x % 0; | |
11665 x % 1 should have been optimized into zero, punt if | |
11666 it makes it here for whatever reason; | |
11667 x % -c should have been optimized into x % c. */ | |
11668 || compare_tree_int (treeop1, 2) <= 0 | |
11669 /* Likewise x % c == d where d >= c should be always false. */ | |
11670 || tree_int_cst_le (treeop1, *arg1)) | |
11671 return code; | |
11672 | |
11673 /* Unsigned x % pow2 is handled right already, for signed | |
11674 modulo handle it in maybe_optimize_pow2p_mod_cmp. */ | |
11675 if (integer_pow2p (treeop1)) | |
11676 return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1); | |
11677 | |
11678 tree type = TREE_TYPE (*arg0); | |
11679 scalar_int_mode mode; | |
11680 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode)) | |
11681 return code; | |
11682 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type) | |
11683 || TYPE_PRECISION (type) <= 1) | |
11684 return code; | |
11685 | |
11686 signop sgn = UNSIGNED; | |
11687 /* If both operands are known to have the sign bit clear, handle | |
11688 even the signed modulo case as unsigned. treeop1 is always | |
11689 positive >= 2, checked above. */ | |
11690 if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1) | |
11691 sgn = SIGNED; | |
11692 | |
11693 if (!TYPE_UNSIGNED (type)) | |
11694 { | |
11695 if (tree_int_cst_sgn (*arg1) == -1) | |
11696 return code; | |
11697 type = unsigned_type_for (type); | |
11698 if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0))) | |
11699 return code; | |
11700 } | |
11701 | |
11702 int prec = TYPE_PRECISION (type); | |
11703 wide_int w = wi::to_wide (treeop1); | |
11704 int shift = wi::ctz (w); | |
11705 /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if | |
11706 C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0. | |
11707 If C1 is odd, we can handle all cases by subtracting | |
11708 C4 below. We could handle even the even C1 and C2 > -1U % C1 cases | |
11709 e.g. by testing for overflow on the subtraction, punt on that for now | |
11710 though. */ | |
11711 if ((sgn == SIGNED || shift) && !integer_zerop (*arg1)) | |
11712 { | |
11713 if (sgn == SIGNED) | |
11714 return code; | |
11715 wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w); | |
11716 if (wi::gtu_p (wi::to_wide (*arg1), x)) | |
11717 return code; | |
11718 } | |
11719 | |
11720 imm_use_iterator imm_iter; | |
11721 use_operand_p use_p; | |
11722 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0) | |
11723 { | |
11724 gimple *use_stmt = USE_STMT (use_p); | |
11725 /* Punt if treeop0 is used in the same bb in a division | |
11726 or another modulo with the same divisor. We should expect | |
11727 the division and modulo combined together. */ | |
11728 if (use_stmt == stmt | |
11729 || gimple_bb (use_stmt) != gimple_bb (stmt)) | |
11730 continue; | |
11731 if (!is_gimple_assign (use_stmt) | |
11732 || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR | |
11733 && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR)) | |
11734 continue; | |
11735 if (gimple_assign_rhs1 (use_stmt) != treeop0 | |
11736 || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0)) | |
11737 continue; | |
11738 return code; | |
11739 } | |
11740 | |
11741 w = wi::lrshift (w, shift); | |
11742 wide_int a = wide_int::from (w, prec + 1, UNSIGNED); | |
11743 wide_int b = wi::shifted_mask (prec, 1, false, prec + 1); | |
11744 wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED); | |
11745 tree c3 = wide_int_to_tree (type, m); | |
11746 tree c5 = NULL_TREE; | |
11747 wide_int d, e; | |
11748 if (sgn == UNSIGNED) | |
11749 { | |
11750 d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e); | |
11751 /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1, | |
11752 otherwise use < or subtract one from C4. E.g. for | |
11753 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but | |
11754 x % 3U == 1 already needs to be | |
11755 (x - 1) * 0xaaaaaaabU <= 0x55555554. */ | |
11756 if (!shift && wi::gtu_p (wi::to_wide (*arg1), e)) | |
11757 d -= 1; | |
11758 if (shift) | |
11759 d = wi::lrshift (d, shift); | |
11760 } | |
11761 else | |
11762 { | |
11763 e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w); | |
11764 if (!shift) | |
11765 d = wi::lshift (e, 1); | |
11766 else | |
11767 { | |
11768 e = wi::bit_and (e, wi::mask (shift, true, prec)); | |
11769 d = wi::lrshift (e, shift - 1); | |
11770 } | |
11771 c5 = wide_int_to_tree (type, e); | |
11772 } | |
11773 tree c4 = wide_int_to_tree (type, d); | |
11774 | |
11775 rtx op0 = expand_normal (treeop0); | |
11776 treeop0 = make_tree (TREE_TYPE (treeop0), op0); | |
11777 | |
11778 bool speed_p = optimize_insn_for_speed_p (); | |
11779 | |
11780 do_pending_stack_adjust (); | |
11781 | |
11782 location_t loc = gimple_location (stmt); | |
11783 struct separate_ops ops; | |
11784 ops.code = TRUNC_MOD_EXPR; | |
11785 ops.location = loc; | |
11786 ops.type = TREE_TYPE (treeop0); | |
11787 ops.op0 = treeop0; | |
11788 ops.op1 = treeop1; | |
11789 ops.op2 = NULL_TREE; | |
11790 start_sequence (); | |
11791 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type), | |
11792 EXPAND_NORMAL); | |
11793 rtx_insn *moinsns = get_insns (); | |
11794 end_sequence (); | |
11795 | |
11796 unsigned mocost = seq_cost (moinsns, speed_p); | |
11797 mocost += rtx_cost (mor, mode, EQ, 0, speed_p); | |
11798 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p); | |
11799 | |
11800 tree t = fold_convert_loc (loc, type, treeop0); | |
11801 if (!integer_zerop (*arg1)) | |
11802 t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1)); | |
11803 t = fold_build2_loc (loc, MULT_EXPR, type, t, c3); | |
11804 if (sgn == SIGNED) | |
11805 t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5); | |
11806 if (shift) | |
11807 { | |
11808 tree s = build_int_cst (NULL_TREE, shift); | |
11809 t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s); | |
11810 } | |
11811 | |
11812 start_sequence (); | |
11813 rtx mur = expand_normal (t); | |
11814 rtx_insn *muinsns = get_insns (); | |
11815 end_sequence (); | |
11816 | |
11817 unsigned mucost = seq_cost (muinsns, speed_p); | |
11818 mucost += rtx_cost (mur, mode, LE, 0, speed_p); | |
11819 mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p); | |
11820 | |
11821 if (mocost <= mucost) | |
11822 { | |
11823 emit_insn (moinsns); | |
11824 *arg0 = make_tree (TREE_TYPE (*arg0), mor); | |
11825 return code; | |
11826 } | |
11827 | |
11828 emit_insn (muinsns); | |
11829 *arg0 = make_tree (type, mur); | |
11830 *arg1 = c4; | |
11831 return code == EQ_EXPR ? LE_EXPR : GT_EXPR; | |
11323 } | 11832 } |
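
And a similar illustrative check (again not GCC code) of the odd-divisor case from the comment above: for unsigned 32-bit x, x % 3 == 0 becomes x * 0xaaaaaaab <= 0x55555555, where 0xaaaaaaab is the inverse of 3 modulo 2^32 and 0x55555555 is (2^32 - 1) / 3.

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 100000; x++)
    assert ((x % 3 == 0)
            == (x * UINT32_C (0xaaaaaaab) <= UINT32_C (0x55555555)));
  /* Also holds at the top of the range: 0xfffffffc is divisible by 3
     and maps to 0x55555554.  */
  assert (UINT32_C (0xfffffffc) * UINT32_C (0xaaaaaaab)
          <= UINT32_C (0x55555555));
  return 0;
}

Multiplying by the modular inverse maps the multiples of 3 bijectively onto 0 .. 0x55555555, and every non-multiple lands above that bound; that is the Hacker's Delight section 10-17 argument the function relies on.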
11324 | 11833 |
11325 /* Generate code to calculate OPS, an exploded expression, | 11834 /* Generate code to calculate OPS, an exploded expression, |
11326 using a store-flag instruction and return an rtx for the result. | 11835 using a store-flag instruction and return an rtx for the result. |
11327 OPS reflects a comparison. | 11836 OPS reflects a comparison. |
11367 return 0; | 11876 return 0; |
11368 | 11877 |
11369 /* We won't bother with store-flag operations involving function pointers | 11878 /* We won't bother with store-flag operations involving function pointers |
11370 when function pointers must be canonicalized before comparisons. */ | 11879 when function pointers must be canonicalized before comparisons. */ |
11371 if (targetm.have_canonicalize_funcptr_for_compare () | 11880 if (targetm.have_canonicalize_funcptr_for_compare () |
11372 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE | 11881 && ((POINTER_TYPE_P (TREE_TYPE (arg0)) |
11373 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) | 11882 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))) |
11374 == FUNCTION_TYPE)) | 11883 || (POINTER_TYPE_P (TREE_TYPE (arg1)) |
11375 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE | 11884 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))))) |
11376 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) | |
11377 == FUNCTION_TYPE)))) | |
11378 return 0; | 11885 return 0; |
11379 | 11886 |
11380 STRIP_NOPS (arg0); | 11887 STRIP_NOPS (arg0); |
11381 STRIP_NOPS (arg1); | 11888 STRIP_NOPS (arg1); |
11382 | 11889 |
11383 /* For vector typed comparisons emit code to generate the desired | 11890 /* For vector typed comparisons emit code to generate the desired |
11384 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR | 11891 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR |
11385 expander for this. */ | 11892 expander for this. */ |
11386 if (TREE_CODE (ops->type) == VECTOR_TYPE) | 11893 if (TREE_CODE (ops->type) == VECTOR_TYPE) |
11387 { | 11894 { |
11393 { | 11900 { |
11394 tree if_true = constant_boolean_node (true, ops->type); | 11901 tree if_true = constant_boolean_node (true, ops->type); |
11395 tree if_false = constant_boolean_node (false, ops->type); | 11902 tree if_false = constant_boolean_node (false, ops->type); |
11396 return expand_vec_cond_expr (ops->type, ifexp, if_true, | 11903 return expand_vec_cond_expr (ops->type, ifexp, if_true, |
11397 if_false, target); | 11904 if_false, target); |
11905 } | |
11906 } | |
11907 | |
11908 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial | |
11909 into (x - C2) * C3 < C4. */ | |
11910 if ((ops->code == EQ_EXPR || ops->code == NE_EXPR) | |
11911 && TREE_CODE (arg0) == SSA_NAME | |
11912 && TREE_CODE (arg1) == INTEGER_CST) | |
11913 { | |
11914 enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1); | |
11915 if (code != ops->code) | |
11916 { | |
11917 struct separate_ops nops = *ops; | |
11918 nops.code = ops->code = code; | |
11919 nops.op0 = arg0; | |
11920 nops.op1 = arg1; | |
11921 nops.type = TREE_TYPE (arg0); | |
11922 return do_store_flag (&nops, target, mode); | |
11398 } | 11923 } |
11399 } | 11924 } |
11400 | 11925 |
11401 /* Get the rtx comparison code to use. We know that EXP is a comparison | 11926 /* Get the rtx comparison code to use. We know that EXP is a comparison |
11402 operation of some type. Some comparisons against 1 and -1 can be | 11927 operation of some type. Some comparisons against 1 and -1 can be |
11614 | 12139 |
11615 if (default_label) | 12140 if (default_label) |
11616 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, | 12141 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
11617 default_label, default_probability); | 12142 default_label, default_probability); |
11618 | 12143 |
11619 | |
11620 /* If index is in range, it must fit in Pmode. | 12144 /* If index is in range, it must fit in Pmode. |
11621 Convert to Pmode so we can index with it. */ | 12145 Convert to Pmode so we can index with it. */ |
11622 if (mode != Pmode) | 12146 if (mode != Pmode) |
11623 index = convert_to_mode (Pmode, index, 1); | 12147 { |
12148 unsigned int width; | |
12149 | |
12150 /* We know the value of INDEX is between 0 and RANGE. If we have a | |
12151 sign-extended subreg, and RANGE does not have the sign bit set, then | |
12152 we have a value that is valid for both sign and zero extension. In | |
12153 this case, we get better code if we sign extend. */ | |
12154 if (GET_CODE (index) == SUBREG | |
12155 && SUBREG_PROMOTED_VAR_P (index) | |
12156 && SUBREG_PROMOTED_SIGNED_P (index) | |
12157 && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))) | |
12158 <= HOST_BITS_PER_WIDE_INT) | |
12159 && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1)))) | |
12160 index = convert_to_mode (Pmode, index, 0); | |
12161 else | |
12162 index = convert_to_mode (Pmode, index, 1); | |
12163 } | |
11624 | 12164 |
11625 /* Don't let a MEM slip through, because then INDEX that comes | 12165 /* Don't let a MEM slip through, because then INDEX that comes |
11626 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, | 12166 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, |
11627 and break_out_memory_refs will go to work on it and mess it up. */ | 12167 and break_out_memory_refs will go to work on it and mess it up. */ |
11628 #ifdef PIC_CASE_VECTOR_ADDRESS | 12168 #ifdef PIC_CASE_VECTOR_ADDRESS |
11686 /* Return a CONST_VECTOR rtx representing vector mask for | 12226 /* Return a CONST_VECTOR rtx representing vector mask for |
11687 a VECTOR_CST of booleans. */ | 12227 a VECTOR_CST of booleans. */ |
11688 static rtx | 12228 static rtx |
11689 const_vector_mask_from_tree (tree exp) | 12229 const_vector_mask_from_tree (tree exp) |
11690 { | 12230 { |
11691 rtvec v; | 12231 machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
11692 unsigned i, units; | 12232 machine_mode inner = GET_MODE_INNER (mode); |
11693 tree elt; | 12233 |
11694 machine_mode inner, mode; | 12234 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp), |
11695 | 12235 VECTOR_CST_NELTS_PER_PATTERN (exp)); |
11696 mode = TYPE_MODE (TREE_TYPE (exp)); | 12236 unsigned int count = builder.encoded_nelts (); |
11697 units = VECTOR_CST_NELTS (exp); | 12237 for (unsigned int i = 0; i < count; ++i) |
11698 inner = GET_MODE_INNER (mode); | 12238 { |
11699 | 12239 tree elt = VECTOR_CST_ELT (exp, i); |
11700 v = rtvec_alloc (units); | |
11701 | |
11702 for (i = 0; i < units; ++i) | |
11703 { | |
11704 elt = VECTOR_CST_ELT (exp, i); | |
11705 | |
11706 gcc_assert (TREE_CODE (elt) == INTEGER_CST); | 12240 gcc_assert (TREE_CODE (elt) == INTEGER_CST); |
11707 if (integer_zerop (elt)) | 12241 if (integer_zerop (elt)) |
11708 RTVEC_ELT (v, i) = CONST0_RTX (inner); | 12242 builder.quick_push (CONST0_RTX (inner)); |
11709 else if (integer_onep (elt) | 12243 else if (integer_onep (elt) |
11710 || integer_minus_onep (elt)) | 12244 || integer_minus_onep (elt)) |
11711 RTVEC_ELT (v, i) = CONSTM1_RTX (inner); | 12245 builder.quick_push (CONSTM1_RTX (inner)); |
11712 else | 12246 else |
11713 gcc_unreachable (); | 12247 gcc_unreachable (); |
11714 } | 12248 } |
11715 | 12249 return builder.build (); |
11716 return gen_rtx_CONST_VECTOR (mode, v); | |
11717 } | 12250 } |
11718 | 12251 |
11719 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones. | 12252 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones. |
11720 Return a constant scalar rtx of mode MODE in which bit X is set if element | 12253 Return a constant scalar rtx of mode MODE in which bit X is set if element |
11721 X of EXP is nonzero. */ | 12254 X of EXP is nonzero. */ |
11722 static rtx | 12255 static rtx |
11723 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp) | 12256 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp) |
11724 { | 12257 { |
11725 wide_int res = wi::zero (GET_MODE_PRECISION (mode)); | 12258 wide_int res = wi::zero (GET_MODE_PRECISION (mode)); |
11726 tree elt; | 12259 tree elt; |
11727 unsigned i; | 12260 |
11728 | 12261 /* The result has a fixed number of bits so the input must too. */ |
11729 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i) | 12262 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant (); |
12263 for (unsigned int i = 0; i < nunits; ++i) | |
11730 { | 12264 { |
11731 elt = VECTOR_CST_ELT (exp, i); | 12265 elt = VECTOR_CST_ELT (exp, i); |
11732 gcc_assert (TREE_CODE (elt) == INTEGER_CST); | 12266 gcc_assert (TREE_CODE (elt) == INTEGER_CST); |
11733 if (integer_all_onesp (elt)) | 12267 if (integer_all_onesp (elt)) |
11734 res = wi::set_bit (res, i); | 12268 res = wi::set_bit (res, i); |
11741 | 12275 |
11742 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */ | 12276 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */ |
11743 static rtx | 12277 static rtx |
11744 const_vector_from_tree (tree exp) | 12278 const_vector_from_tree (tree exp) |
11745 { | 12279 { |
11746 rtvec v; | 12280 machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
11747 unsigned i, units; | |
11748 tree elt; | |
11749 machine_mode inner, mode; | |
11750 | |
11751 mode = TYPE_MODE (TREE_TYPE (exp)); | |
11752 | 12281 |
11753 if (initializer_zerop (exp)) | 12282 if (initializer_zerop (exp)) |
11754 return CONST0_RTX (mode); | 12283 return CONST0_RTX (mode); |
11755 | 12284 |
11756 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp))) | 12285 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp))) |
11757 return const_vector_mask_from_tree (exp); | 12286 return const_vector_mask_from_tree (exp); |
11758 | 12287 |
11759 units = VECTOR_CST_NELTS (exp); | 12288 machine_mode inner = GET_MODE_INNER (mode); |
11760 inner = GET_MODE_INNER (mode); | 12289 |
11761 | 12290 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp), |
11762 v = rtvec_alloc (units); | 12291 VECTOR_CST_NELTS_PER_PATTERN (exp)); |
11763 | 12292 unsigned int count = builder.encoded_nelts (); |
11764 for (i = 0; i < units; ++i) | 12293 for (unsigned int i = 0; i < count; ++i) |
11765 { | 12294 { |
11766 elt = VECTOR_CST_ELT (exp, i); | 12295 tree elt = VECTOR_CST_ELT (exp, i); |
11767 | |
11768 if (TREE_CODE (elt) == REAL_CST) | 12296 if (TREE_CODE (elt) == REAL_CST) |
11769 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt), | 12297 builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt), |
11770 inner); | 12298 inner)); |
11771 else if (TREE_CODE (elt) == FIXED_CST) | 12299 else if (TREE_CODE (elt) == FIXED_CST) |
11772 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt), | 12300 builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt), |
11773 inner); | 12301 inner)); |
11774 else | 12302 else |
11775 RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_wide (elt), inner); | 12303 builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt), |
11776 } | 12304 inner)); |
11777 | 12305 } |
11778 return gen_rtx_CONST_VECTOR (mode, v); | 12306 return builder.build (); |
11779 } | 12307 } |
11780 | 12308 |
11781 /* Build a decl for a personality function given a language prefix. */ | 12309 /* Build a decl for a personality function given a language prefix. */ |
11782 | 12310 |
11783 tree | 12311 tree |