comparison gcc/cselib.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0

old: 111:04ced10e8804    new: 131:84e7813d76e9
1 /* Common subexpression elimination library for GNU compiler. 1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. 2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
52 static void remove_useless_values (void); 52 static void remove_useless_values (void);
53 static unsigned int cselib_hash_rtx (rtx, int, machine_mode); 53 static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
54 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx); 54 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
55 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx); 55 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
56 static cselib_val *cselib_lookup_mem (rtx, int); 56 static cselib_val *cselib_lookup_mem (rtx, int);
57 static void cselib_invalidate_regno (unsigned int, machine_mode); 57 static void cselib_invalidate_regno (unsigned int, machine_mode,
58 const_rtx = NULL);
58 static void cselib_invalidate_mem (rtx); 59 static void cselib_invalidate_mem (rtx);
59 static void cselib_record_set (rtx, cselib_val *, cselib_val *); 60 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
60 static void cselib_record_sets (rtx_insn *); 61 static void cselib_record_sets (rtx_insn *);
61 62
62 struct expand_value_data 63 struct expand_value_data
803 804
804 case PRE_DEC: 805 case PRE_DEC:
805 if (memmode == VOIDmode) 806 if (memmode == VOIDmode)
806 return x; 807 return x;
807 808
808 *off = GEN_INT (-GET_MODE_SIZE (memmode)); 809 *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
809 return XEXP (x, 0); 810 return XEXP (x, 0);
810 811
811 case PRE_INC: 812 case PRE_INC:
812 if (memmode == VOIDmode) 813 if (memmode == VOIDmode)
813 return x; 814 return x;
814 815
815 *off = GEN_INT (GET_MODE_SIZE (memmode)); 816 *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
816 return XEXP (x, 0); 817 return XEXP (x, 0);
817 818
818 case PRE_MODIFY: 819 case PRE_MODIFY:
819 return XEXP (x, 1); 820 return XEXP (x, 1);
820 821
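
The hunk above swaps GEN_INT for gen_int_mode when building the implicit PRE_DEC/PRE_INC offset, so the constant is canonicalized for the mode of the address rather than stored as a raw host-wide integer. The sketch below is a minimal, self-contained model of that canonicalization; trunc_int_for_mode_sketch is an invented helper for illustration, not GCC's gen_int_mode.

// Sign-extend VALUE to PRECISION bits, the way a canonical CONST_INT of
// that mode would be represented.  Toy model only.
#include <cstdint>
#include <cassert>

static int64_t trunc_int_for_mode_sketch (int64_t value, unsigned precision)
{
  assert (precision > 0 && precision <= 64);
  if (precision == 64)
    return value;
  uint64_t mask = (uint64_t (1) << precision) - 1;
  uint64_t low = uint64_t (value) & mask;
  uint64_t sign = uint64_t (1) << (precision - 1);
  return int64_t ((low ^ sign) - sign);   // sign-extend from bit PRECISION-1
}

int main ()
{
  // -GET_MODE_SIZE (memmode) for a 16-byte mode, canonicalized for a
  // 16-bit address mode: still -16, but now provably in canonical form.
  assert (trunc_int_for_mode_sketch (-16, 16) == -16);
  // A raw value that does not fit the mode gets wrapped and re-extended.
  assert (trunc_int_for_mode_sketch (0x1234567890, 16) == 0x7890);
  return 0;
}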
982 break; 983 break;
983 984
984 case 'n': 985 case 'n':
985 case 'i': 986 case 'i':
986 if (XINT (x, i) != XINT (y, i)) 987 if (XINT (x, i) != XINT (y, i))
988 return 0;
989 break;
990
991 case 'p':
992 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
987 return 0; 993 return 0;
988 break; 994 break;
989 995
990 case 'V': 996 case 'V':
991 case 'E': 997 case 'E':
1061 1067
1062 static unsigned int 1068 static unsigned int
1063 cselib_hash_rtx (rtx x, int create, machine_mode memmode) 1069 cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1064 { 1070 {
1065 cselib_val *e; 1071 cselib_val *e;
1072 poly_int64 offset;
1066 int i, j; 1073 int i, j;
1067 enum rtx_code code; 1074 enum rtx_code code;
1068 const char *fmt; 1075 const char *fmt;
1069 unsigned int hash = 0; 1076 unsigned int hash = 0;
1070 1077
1126 case CONST_WIDE_INT: 1133 case CONST_WIDE_INT:
1127 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++) 1134 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1128 hash += CONST_WIDE_INT_ELT (x, i); 1135 hash += CONST_WIDE_INT_ELT (x, i);
1129 return hash; 1136 return hash;
1130 1137
1138 case CONST_POLY_INT:
1139 {
1140 inchash::hash h;
1141 h.add_int (hash);
1142 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1143 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
1144 return h.end ();
1145 }
1146
1131 case CONST_DOUBLE: 1147 case CONST_DOUBLE:
1132 /* This is like the general case, except that it only counts 1148 /* This is like the general case, except that it only counts
1133 the integers representing the constant. */ 1149 the integers representing the constant. */
1134 hash += (unsigned) code + (unsigned) GET_MODE (x); 1150 hash += (unsigned) code + (unsigned) GET_MODE (x);
1135 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode) 1151 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
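
The added CONST_POLY_INT case folds every coefficient of the polynomial constant into one incremental hash so that equal constants land in the same bucket. The sketch below illustrates the idea with a toy accumulator; inchash_sketch only imitates the add_int/add_wide_int/end interface and is not GCC's inchash::hash.

#include <cstdint>
#include <cstdio>

struct inchash_sketch
{
  uint32_t val = 0;
  void add_int (uint32_t v)      { val = val * 0x9e3779b9u + v; }
  void add_wide_int (int64_t v)  { add_int (uint32_t (v));
                                   add_int (uint32_t (uint64_t (v) >> 32)); }
  uint32_t end () const          { return val ? val : 1; }  // never 0, like cselib hashes
};

int main ()
{
  const unsigned num_coeffs = 2;            // NUM_POLY_INT_COEFFS on an SVE-like target
  int64_t coeffs[num_coeffs] = { 16, 16 };  // e.g. 16 + 16*N bytes

  inchash_sketch h;
  h.add_int (42);                           // seed with the hash accumulated so far
  for (unsigned i = 0; i < num_coeffs; ++i)
    h.add_wide_int (coeffs[i]);
  printf ("hash = %u\n", (unsigned) h.end ());
  return 0;
}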
1147 case CONST_VECTOR: 1163 case CONST_VECTOR:
1148 { 1164 {
1149 int units; 1165 int units;
1150 rtx elt; 1166 rtx elt;
1151 1167
1152 units = CONST_VECTOR_NUNITS (x); 1168 units = const_vector_encoded_nelts (x);
1153 1169
1154 for (i = 0; i < units; ++i) 1170 for (i = 0; i < units; ++i)
1155 { 1171 {
1156 elt = CONST_VECTOR_ELT (x, i); 1172 elt = CONST_VECTOR_ENCODED_ELT (x, i);
1157 hash += cselib_hash_rtx (elt, 0, memmode); 1173 hash += cselib_hash_rtx (elt, 0, memmode);
1158 } 1174 }
1159 1175
1160 return hash; 1176 return hash;
1161 } 1177 }
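
const_vector_encoded_nelts and CONST_VECTOR_ENCODED_ELT walk the stored encoding of a (possibly variable-length) vector instead of expanding every lane, so the hash touches only npatterns * nelts_per_pattern elements. The following sketch models that with an invented encoded_vector type; the real CONST_VECTOR encoding is only approximated here.

#include <cstdio>
#include <vector>

struct encoded_vector
{
  unsigned npatterns;
  unsigned nelts_per_pattern;
  std::vector<long> elts;                  // npatterns * nelts_per_pattern values

  unsigned encoded_nelts () const { return npatterns * nelts_per_pattern; }
};

static unsigned hash_encoded (const encoded_vector &v)
{
  unsigned hash = 0;
  for (unsigned i = 0; i < v.encoded_nelts (); ++i)
    hash += (unsigned) v.elts[i];          // same accumulation style as cselib_hash_rtx
  return hash;
}

int main ()
{
  // A "vector of N copies of 7" needs a single encoded element, whatever N is.
  encoded_vector splat = { 1, 1, { 7 } };
  // A stepped vector {0, 1, 2, ...} stores one pattern of three elements.
  encoded_vector series = { 1, 3, { 0, 1, 2 } };
  printf ("%u %u\n", hash_encoded (splat), hash_encoded (series));
  return 0;
}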
1187 1203
1188 case PRE_DEC: 1204 case PRE_DEC:
1189 case PRE_INC: 1205 case PRE_INC:
1190 /* We can't compute these without knowing the MEM mode. */ 1206 /* We can't compute these without knowing the MEM mode. */
1191 gcc_assert (memmode != VOIDmode); 1207 gcc_assert (memmode != VOIDmode);
1192 i = GET_MODE_SIZE (memmode); 1208 offset = GET_MODE_SIZE (memmode);
1193 if (code == PRE_DEC) 1209 if (code == PRE_DEC)
1194 i = -i; 1210 offset = -offset;
1195 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes 1211 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1196 like (mem:MEMMODE (plus (reg) (const_int I))). */ 1212 like (mem:MEMMODE (plus (reg) (const_int I))). */
1197 hash += (unsigned) PLUS - (unsigned)code 1213 hash += (unsigned) PLUS - (unsigned)code
1198 + cselib_hash_rtx (XEXP (x, 0), create, memmode) 1214 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1199 + cselib_hash_rtx (GEN_INT (i), create, memmode); 1215 + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
1216 create, memmode);
1200 return hash ? hash : 1 + (unsigned) PLUS; 1217 return hash ? hash : 1 + (unsigned) PLUS;
1201 1218
1202 case PRE_MODIFY: 1219 case PRE_MODIFY:
1203 gcc_assert (memmode != VOIDmode); 1220 gcc_assert (memmode != VOIDmode);
1204 return cselib_hash_rtx (XEXP (x, 1), create, memmode); 1221 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
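
As the comment in the hunk says, a PRE_DEC or PRE_INC address is hashed as if it were (plus (reg) (const_int +/-size)), now computed with a poly_int64 offset and gen_int_mode. The toy below reproduces only the hashing relationship, namely that both spellings of the address produce the same hash; every toy_hash_* helper is invented for illustration and is not cselib's hash.

#include <cassert>

enum toy_code { TOY_REG, TOY_PLUS, TOY_PRE_DEC };

static unsigned toy_hash_reg (unsigned regno)
{
  return (unsigned) TOY_REG + regno + 1;
}

static unsigned toy_hash_const_int (long value)
{
  return (unsigned) value;
}

// Hash of (plus (reg REGNO) (const_int OFFSET)).
static unsigned toy_hash_plus (unsigned regno, long offset)
{
  return (unsigned) TOY_PLUS + toy_hash_reg (regno) + toy_hash_const_int (offset);
}

// Hash of (pre_dec (reg REGNO)) inside a MEM whose mode is SIZE bytes wide:
// start from the PRE_DEC code, then adjust by PLUS - PRE_DEC and add the
// hash of the implied offset, mirroring the shape of the code above.
static unsigned toy_hash_pre_dec (unsigned regno, long size)
{
  return (unsigned) TOY_PRE_DEC
         + (unsigned) TOY_PLUS - (unsigned) TOY_PRE_DEC
         + toy_hash_reg (regno)
         + toy_hash_const_int (-size);
}

int main ()
{
  // (mem:DI (pre_dec (reg 1))) hashes like (mem:DI (plus (reg 1) (const_int -8))).
  assert (toy_hash_pre_dec (1, 8) == toy_hash_plus (1, -8));
  return 0;
}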
1265 break; 1282 break;
1266 } 1283 }
1267 1284
1268 case 'i': 1285 case 'i':
1269 hash += XINT (x, i); 1286 hash += XINT (x, i);
1287 break;
1288
1289 case 'p':
1290 hash += constant_lower_bound (SUBREG_BYTE (x));
1270 break; 1291 break;
1271 1292
1272 case '0': 1293 case '0':
1273 case 't': 1294 case 't':
1274 /* unused */ 1295 /* unused */
1639 case CC0: 1660 case CC0:
1640 case SCRATCH: 1661 case SCRATCH:
1641 /* SCRATCH must be shared because they represent distinct values. */ 1662 /* SCRATCH must be shared because they represent distinct values. */
1642 return orig; 1663 return orig;
1643 case CLOBBER: 1664 case CLOBBER:
1665 case CLOBBER_HIGH:
1644 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))) 1666 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1645 return orig; 1667 return orig;
1646 break; 1668 break;
1647 1669
1648 case CONST: 1670 case CONST:
1851 const char *fmt = GET_RTX_FORMAT (code); 1873 const char *fmt = GET_RTX_FORMAT (code);
1852 cselib_val *e; 1874 cselib_val *e;
1853 struct elt_list *l; 1875 struct elt_list *l;
1854 rtx copy = x; 1876 rtx copy = x;
1855 int i; 1877 int i;
1878 poly_int64 offset;
1856 1879
1857 switch (code) 1880 switch (code)
1858 { 1881 {
1859 case REG: 1882 case REG:
1860 l = REG_VALUES (REGNO (x)); 1883 l = REG_VALUES (REGNO (x));
1887 return x; 1910 return x;
1888 1911
1889 case PRE_DEC: 1912 case PRE_DEC:
1890 case PRE_INC: 1913 case PRE_INC:
1891 gcc_assert (memmode != VOIDmode); 1914 gcc_assert (memmode != VOIDmode);
1892 i = GET_MODE_SIZE (memmode); 1915 offset = GET_MODE_SIZE (memmode);
1893 if (code == PRE_DEC) 1916 if (code == PRE_DEC)
1894 i = -i; 1917 offset = -offset;
1895 return cselib_subst_to_values (plus_constant (GET_MODE (x), 1918 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1896 XEXP (x, 0), i), 1919 XEXP (x, 0), offset),
1897 memmode); 1920 memmode);
1898 1921
1899 case PRE_MODIFY: 1922 case PRE_MODIFY:
1900 gcc_assert (memmode != VOIDmode); 1923 gcc_assert (memmode != VOIDmode);
1901 return cselib_subst_to_values (XEXP (x, 1), memmode); 1924 return cselib_subst_to_values (XEXP (x, 1), memmode);
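
cselib_subst_to_values performs the matching rewrite, substituting (plus base +/-size) for the auto-increment form, and the local offset becomes poly_int64 because GET_MODE_SIZE of a variable-length mode is a polynomial in the runtime vector length. A minimal sketch of the coefficient-wise negation and addition this relies on, again using the toy poly2 type from the earlier sketch rather than GCC's poly_int64:

#include <cassert>

struct poly2
{
  long coeffs[2];
};

static poly2 neg (poly2 a)
{
  return { { -a.coeffs[0], -a.coeffs[1] } };
}

static poly2 add (poly2 a, poly2 b)
{
  return { { a.coeffs[0] + b.coeffs[0], a.coeffs[1] + b.coeffs[1] } };
}

int main ()
{
  poly2 base_offset = { { 0, 0 } };
  poly2 mode_size   = { { 16, 16 } };   // e.g. an SVE vector: 16 + 16*N bytes

  // PRE_DEC: the substituted address is (plus base -size), computed
  // coefficient-wise so the variable part of the size is preserved.
  poly2 pre_dec_off = add (base_offset, neg (mode_size));
  assert (pre_dec_off.coeffs[0] == -16 && pre_dec_off.coeffs[1] == -16);
  return 0;
}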
2140 is used to determine how many hard registers are being changed. If MODE 2163 is used to determine how many hard registers are being changed. If MODE
2141 is VOIDmode, then only REGNO is being changed; this is used when 2164 is VOIDmode, then only REGNO is being changed; this is used when
2142 invalidating call clobbered registers across a call. */ 2165 invalidating call clobbered registers across a call. */
2143 2166
2144 static void 2167 static void
2145 cselib_invalidate_regno (unsigned int regno, machine_mode mode) 2168 cselib_invalidate_regno (unsigned int regno, machine_mode mode,
2169 const_rtx setter)
2146 { 2170 {
2147 unsigned int endregno; 2171 unsigned int endregno;
2148 unsigned int i; 2172 unsigned int i;
2149 2173
2150 /* If we see pseudos after reload, something is _wrong_. */ 2174 /* If we see pseudos after reload, something is _wrong_. */
2163 i = 0; 2187 i = 0;
2164 else 2188 else
2165 i = regno - max_value_regs; 2189 i = regno - max_value_regs;
2166 2190
2167 endregno = end_hard_regno (mode, regno); 2191 endregno = end_hard_regno (mode, regno);
2192
2193 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2194 gcc_assert (endregno == regno + 1);
2168 } 2195 }
2169 else 2196 else
2170 { 2197 {
2171 i = regno; 2198 i = regno;
2172 endregno = regno + 1; 2199 endregno = regno + 1;
2193 || (v == cfa_base_preserved_val 2220 || (v == cfa_base_preserved_val
2194 && i == cfa_base_preserved_regno)) 2221 && i == cfa_base_preserved_regno))
2195 { 2222 {
2196 l = &(*l)->next; 2223 l = &(*l)->next;
2197 continue; 2224 continue;
2225 }
2226
2227 /* Ignore if clobber high and the register isn't clobbered. */
2228 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2229 {
2230 gcc_assert (endregno == regno + 1);
2231 const_rtx x = XEXP (setter, 0);
2232 if (!reg_is_clobbered_by_clobber_high (i, GET_MODE (v->val_rtx),
2233 x))
2234 {
2235 l = &(*l)->next;
2236 continue;
2237 }
2198 } 2238 }
2199 2239
2200 /* We have an overlap. */ 2240 /* We have an overlap. */
2201 if (*l == REG_VALUES (i)) 2241 if (*l == REG_VALUES (i))
2202 { 2242 {
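
The new block skips invalidation when the setter is a CLOBBER_HIGH that does not actually disturb the cached value: only the bits above the clobber's mode are lost, so values confined to the preserved low part of the register survive. The sketch below captures that decision in plain integers; it is a simplified model, not GCC's reg_is_clobbered_by_clobber_high.

#include <cassert>

// Returns true if a value of VALUE_BYTES stored in register REGNO must be
// invalidated by a clobber_high of register CLOBBER_REGNO whose low
// PRESERVED_BYTES are untouched.
static bool clobbered_by_clobber_high_sketch (unsigned regno, unsigned value_bytes,
                                              unsigned clobber_regno,
                                              unsigned preserved_bytes)
{
  if (regno != clobber_regno)
    return false;                       // a different register is unaffected
  return value_bytes > preserved_bytes; // only the high part is clobbered
}

int main ()
{
  // clobber_high (reg:TI 32): the low 16 bytes of register 32 survive.
  const unsigned preserved = 16;

  // A 16-byte vector value cached in register 32 is still valid ...
  assert (!clobbered_by_clobber_high_sketch (32, 16, 32, preserved));
  // ... but a 32-byte (e.g. wider SVE) value in the same register is not.
  assert (clobbered_by_clobber_high_sketch (32, 32, 32, preserved));
  return 0;
}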
2329 v->next_containing_mem = NULL; 2369 v->next_containing_mem = NULL;
2330 } 2370 }
2331 *vp = &dummy_val; 2371 *vp = &dummy_val;
2332 } 2372 }
2333 2373
2334 /* Invalidate DEST, which is being assigned to or clobbered. */ 2374 /* Invalidate DEST, which is being assigned to or clobbered by SETTER. */
2335 2375
2336 void 2376 void
2337 cselib_invalidate_rtx (rtx dest) 2377 cselib_invalidate_rtx (rtx dest, const_rtx setter)
2338 { 2378 {
2339 while (GET_CODE (dest) == SUBREG 2379 while (GET_CODE (dest) == SUBREG
2340 || GET_CODE (dest) == ZERO_EXTRACT 2380 || GET_CODE (dest) == ZERO_EXTRACT
2341 || GET_CODE (dest) == STRICT_LOW_PART) 2381 || GET_CODE (dest) == STRICT_LOW_PART)
2342 dest = XEXP (dest, 0); 2382 dest = XEXP (dest, 0);
2343 2383
2344 if (REG_P (dest)) 2384 if (REG_P (dest))
2345 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest)); 2385 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest), setter);
2346 else if (MEM_P (dest)) 2386 else if (MEM_P (dest))
2347 cselib_invalidate_mem (dest); 2387 cselib_invalidate_mem (dest);
2348 } 2388 }
2349 2389
2350 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */ 2390 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2351 2391
2352 static void 2392 static void
2353 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED, 2393 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx setter,
2354 void *data ATTRIBUTE_UNUSED) 2394 void *data ATTRIBUTE_UNUSED)
2355 { 2395 {
2356 cselib_invalidate_rtx (dest); 2396 cselib_invalidate_rtx (dest, setter);
2357 } 2397 }
2358 2398
2359 /* Record the result of a SET instruction. DEST is being set; the source 2399 /* Record the result of a SET instruction. DEST is being set; the source
2360 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT 2400 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2361 describes its address. */ 2401 describes its address. */
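
cselib_invalidate_rtx now takes the setter rtx, and the note_stores wrapper forwards it instead of ignoring it, so the regno invalidation above can tell a CLOBBER_HIGH from a full CLOBBER. The sketch below only mirrors the shape of such a walker/callback pair with invented toy types; it is not GCC's note_stores.

#include <cstdio>

enum setter_kind { SET, CLOBBER, CLOBBER_HIGH };

struct toy_store { int dest_regno; setter_kind kind; };

typedef void (*store_fn) (int dest_regno, setter_kind kind, void *data);

// Walk every store in a toy "insn" and hand both destination and setter
// to the callback, mirroring the (dest, setter, data) callback convention.
static void walk_stores_sketch (const toy_store *stores, int n,
                                store_fn fn, void *data)
{
  for (int i = 0; i < n; ++i)
    fn (stores[i].dest_regno, stores[i].kind, data);
}

static void invalidate_sketch (int dest_regno, setter_kind kind, void *)
{
  // With the setter available, CLOBBER_HIGH can be handled more precisely
  // than a full CLOBBER (see the regno invalidation sketch earlier).
  printf ("reg %d: %s\n", dest_regno,
          kind == CLOBBER_HIGH ? "partial invalidate" : "full invalidate");
}

int main ()
{
  toy_store stores[] = { { 0, SET }, { 32, CLOBBER_HIGH } };
  walk_stores_sketch (stores, 2, invalidate_sketch, nullptr);
  return 0;
}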
2479 int i; 2519 int i;
2480 struct cselib_set sets[MAX_SETS]; 2520 struct cselib_set sets[MAX_SETS];
2481 rtx body = PATTERN (insn); 2521 rtx body = PATTERN (insn);
2482 rtx cond = 0; 2522 rtx cond = 0;
2483 int n_sets_before_autoinc; 2523 int n_sets_before_autoinc;
2524 int n_strict_low_parts = 0;
2484 struct cselib_record_autoinc_data data; 2525 struct cselib_record_autoinc_data data;
2485 2526
2486 body = PATTERN (insn); 2527 body = PATTERN (insn);
2487 if (GET_CODE (body) == COND_EXEC) 2528 if (GET_CODE (body) == COND_EXEC)
2488 { 2529 {
2533 /* Look up the values that are read. Do this before invalidating the 2574 /* Look up the values that are read. Do this before invalidating the
2534 locations that are written. */ 2575 locations that are written. */
2535 for (i = 0; i < n_sets; i++) 2576 for (i = 0; i < n_sets; i++)
2536 { 2577 {
2537 rtx dest = sets[i].dest; 2578 rtx dest = sets[i].dest;
2579 rtx orig = dest;
2538 2580
2539 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for 2581 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2540 the low part after invalidating any knowledge about larger modes. */ 2582 the low part after invalidating any knowledge about larger modes. */
2541 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART) 2583 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2542 sets[i].dest = dest = XEXP (dest, 0); 2584 sets[i].dest = dest = XEXP (dest, 0);
2558 GET_MODE (dest)); 2600 GET_MODE (dest));
2559 } 2601 }
2560 else 2602 else
2561 sets[i].dest_addr_elt = 0; 2603 sets[i].dest_addr_elt = 0;
2562 } 2604 }
2605
2606 /* Improve handling of STRICT_LOW_PART if the current value is known
2607 to be const0_rtx, then the low bits will be set to dest and higher
2608 bits will remain zero. Used in code like:
2609
2610 {di:SI=0;clobber flags:CC;}
2611 flags:CCNO=cmp(bx:SI,0)
2612 strict_low_part(di:QI)=flags:CCNO<=0
2613
2614 where we can note both that di:QI=flags:CCNO<=0 and
2615 also that because di:SI is known to be 0 and strict_low_part(di:QI)
2616 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0). */
2617 scalar_int_mode mode;
2618 if (dest != orig
2619 && cselib_record_sets_hook
2620 && REG_P (dest)
2621 && HARD_REGISTER_P (dest)
2622 && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
2623 && n_sets + n_strict_low_parts < MAX_SETS)
2624 {
2625 opt_scalar_int_mode wider_mode_iter;
2626 FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
2627 {
2628 scalar_int_mode wider_mode = wider_mode_iter.require ();
2629 if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
2630 break;
2631
2632 rtx reg = gen_lowpart (wider_mode, dest);
2633 if (!REG_P (reg))
2634 break;
2635
2636 cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
2637 if (!v)
2638 continue;
2639
2640 struct elt_loc_list *l;
2641 for (l = v->locs; l; l = l->next)
2642 if (l->loc == const0_rtx)
2643 break;
2644
2645 if (!l)
2646 continue;
2647
2648 sets[n_sets + n_strict_low_parts].dest = reg;
2649 sets[n_sets + n_strict_low_parts].src = dest;
2650 sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
2651 break;
2652 }
2653 }
2563 } 2654 }
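
The long comment in this hunk explains the new STRICT_LOW_PART handling: when the wider register is known to be const0_rtx, writing only its low part is equivalent to zero-extending the new value into the whole register. The sketch below checks that equivalence with ordinary integer bit operations standing in for the rtl; the known-zero precondition is what makes the recorded zero_extend valid.

#include <cassert>
#include <cstdint>

// Model (strict_low_part (subreg:QI (reg:SI di))) = SRC: the high bits of
// the wider register are preserved, only the low byte changes.
static uint32_t write_low_byte (uint32_t wider_reg, uint8_t src)
{
  return (wider_reg & ~uint32_t (0xff)) | src;
}

int main ()
{
  uint8_t src = 0x01;                    // e.g. the flags:CCNO <= 0 result
  uint32_t di = 0;                       // di:SI known to be const0_rtx

  // di:SI == zero_extend (src) afterwards, which is the permanent
  // equivalence cselib now adds for the wider register.
  assert (write_low_byte (di, src) == uint32_t (src));

  // Without the known-zero precondition the equivalence would not hold.
  assert (write_low_byte (0xdeadbe00, src) != uint32_t (src));
  return 0;
}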
2564 2655
2565 if (cselib_record_sets_hook) 2656 if (cselib_record_sets_hook)
2566 cselib_record_sets_hook (insn, sets, n_sets); 2657 cselib_record_sets_hook (insn, sets, n_sets);
2567 2658
2601 { 2692 {
2602 rtx dest = sets[i].dest; 2693 rtx dest = sets[i].dest;
2603 if (REG_P (dest) 2694 if (REG_P (dest)
2604 || (MEM_P (dest) && cselib_record_memory)) 2695 || (MEM_P (dest) && cselib_record_memory))
2605 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt); 2696 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2697 }
2698
2699 /* And deal with STRICT_LOW_PART. */
2700 for (i = 0; i < n_strict_low_parts; i++)
2701 {
2702 if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
2703 continue;
2704 machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
2705 cselib_val *v
2706 = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
2707 cselib_preserve_value (v);
2708 rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
2709 sets[n_sets + i].src_elt->val_rtx);
2710 cselib_add_permanent_equiv (v, r, insn);
2606 } 2711 }
2607 } 2712 }
2608 2713
2609 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */ 2714 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
2610 2715
2687 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only 2792 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2688 after we have processed the insn. */ 2793 after we have processed the insn. */
2689 if (CALL_P (insn)) 2794 if (CALL_P (insn))
2690 { 2795 {
2691 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1)) 2796 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2692 if (GET_CODE (XEXP (x, 0)) == CLOBBER) 2797 {
2693 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0)); 2798 gcc_assert (GET_CODE (XEXP (x, 0)) != CLOBBER_HIGH);
2694 /* Flush evertything on setjmp. */ 2799 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2800 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2801 }
2802 /* Flush everything on setjmp. */
2695 if (cselib_preserve_constants 2803 if (cselib_preserve_constants
2696 && find_reg_note (insn, REG_SETJMP, NULL)) 2804 && find_reg_note (insn, REG_SETJMP, NULL))
2697 { 2805 {
2698 cselib_preserve_only_values (); 2806 cselib_preserve_only_values ();
2699 cselib_reset_table (next_uid); 2807 cselib_reset_table (next_uid);