comparison gcc/ira-lives.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparison of revisions 68:561a7518be6b and 111:04ced10e8804
1 /* IRA processing allocno lives to build allocno live ranges. 1 /* IRA processing allocno lives to build allocno live ranges.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010 2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>. 3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5 4
6 This file is part of GCC. 5 This file is part of GCC.
7 6
8 GCC is free software; you can redistribute it and/or modify it under 7 GCC is free software; you can redistribute it and/or modify it under
20 <http://www.gnu.org/licenses/>. */ 19 <http://www.gnu.org/licenses/>. */
21 20
22 #include "config.h" 21 #include "config.h"
23 #include "system.h" 22 #include "system.h"
24 #include "coretypes.h" 23 #include "coretypes.h"
25 #include "tm.h" 24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
26 #include "regs.h" 32 #include "regs.h"
27 #include "rtl.h" 33 #include "ira.h"
28 #include "tm_p.h" 34 #include "ira-int.h"
29 #include "target.h"
30 #include "flags.h"
31 #include "except.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "params.h"
38 #include "df.h"
39 #include "sbitmap.h"
40 #include "sparseset.h" 35 #include "sparseset.h"
41 #include "ira-int.h"
42 36
43 /* The code in this file is similar to one in global but the code 37 /* The code in this file is similar to one in global but the code
44 works on the allocno basis and creates live ranges instead of 38 works on the allocno basis and creates live ranges instead of
45 pseudo-register conflicts. */ 39 pseudo-register conflicts. */
46 40
62 56
63 /* Point where register pressure excess started or -1 if there is no 57 /* Point where register pressure excess started or -1 if there is no
64 register pressure excess. Excess pressure for a register class at 58 register pressure excess. Excess pressure for a register class at
65 some point means that there are more allocnos of given register 59 some point means that there are more allocnos of given register
66 class living at the point than number of hard-registers of the 60 class living at the point than number of hard-registers of the
67 class available for the allocation. It is defined only for cover 61 class available for the allocation. It is defined only for
68 classes. */ 62 pressure classes. */
69 static int high_pressure_start_point[N_REG_CLASSES]; 63 static int high_pressure_start_point[N_REG_CLASSES];
70 64
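The excess-pressure bookkeeping here (high_pressure_start_point above, inc_register_pressure and dec_register_pressure further down) boils down to remembering the point at which pressure first exceeded the registers a class can offer and, once it drops back, crediting the elapsed distance to the affected live ranges. A minimal, self-contained sketch of that idea, in plain C with invented names rather than GCC's own types, follows:

/* Illustrative sketch only: one pressure counter with an excess-length
   accumulator, mirroring the high_pressure_start_point idea.  All names
   here are hypothetical and this is not GCC code.  */
#include <stdio.h>

static int available_regs = 2;  /* hard regs the class can offer        */
static int cur_pressure;        /* regs of the class live right now     */
static int excess_start = -1;   /* point where pressure first exceeded  */
static int excess_length;       /* accumulated length of excess ranges  */

static void
note_birth (int point)
{
  if (++cur_pressure > available_regs && excess_start < 0)
    excess_start = point;                    /* excess just began */
}

static void
note_death (int point)
{
  if (--cur_pressure <= available_regs && excess_start >= 0)
    {
      excess_length += point - excess_start; /* excess just ended */
      excess_start = -1;
    }
}

int
main (void)
{
  int point = 0;
  note_birth (point++);  /* pressure 1 */
  note_birth (point++);  /* pressure 2 */
  note_birth (point++);  /* pressure 3 > 2: excess starts at point 2 */
  note_death (point++);  /* pressure 2: one unit of excess recorded  */
  note_death (point++);
  note_death (point++);
  printf ("excess length = %d\n", excess_length);
  return 0;
}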
71 /* Objects live at current point in the scan. */ 65 /* Objects live at current point in the scan. */
72 static sparseset objects_live; 66 static sparseset objects_live;
73 67
84 /* The number of the last processed call. */ 78 /* The number of the last processed call. */
85 static int last_call_num; 79 static int last_call_num;
86 /* The number of last call at which given allocno was saved. */ 80 /* The number of last call at which given allocno was saved. */
87 static int *allocno_saved_at_call; 81 static int *allocno_saved_at_call;
88 82
83 /* The value of get_preferred_alternatives for the current instruction,
84 supplemental to recog_data. */
85 static alternative_mask preferred_alternatives;
86
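preferred_alternatives is a bitmask with one bit per instruction alternative; the constraint-scanning code later in this file clears bit 0 on '#', shifts the mask on ',', and tests bit 0 for every other constraint character. A stand-alone sketch of that walking pattern, with invented names rather than GCC's alternative_mask API:

/* Illustrative sketch: walking a comma-separated constraint string while a
   bitmask selects which alternatives are still preferred.  Hypothetical
   names; not GCC's alternative_mask API.  */
#include <stdio.h>

typedef unsigned int alt_mask;  /* one bit per insn alternative */

static void
scan (const char *constraints, alt_mask preferred)
{
  for (const char *p = constraints; *p; p++)
    {
      if (*p == '#')            /* drop the current alternative           */
        preferred &= ~1u;
      else if (*p == ',')       /* next alternative: shift its bit down   */
        preferred >>= 1;
      else if (preferred & 1)   /* current alternative is still preferred */
        printf ("constraint '%c' considered\n", *p);
    }
}

int
main (void)
{
  /* Two alternatives "r,m"; only the second (bit 1) is preferred,
     so only 'm' is considered.  */
  scan ("r,m", 0x2);
  return 0;
}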
89 /* Record the birth of hard register REGNO, updating hard_regs_live and 87 /* Record the birth of hard register REGNO, updating hard_regs_live and
90 hard reg conflict information for living allocnos. */ 88 hard reg conflict information for living allocnos. */
91 static void 89 static void
92 make_hard_regno_born (int regno) 90 make_hard_regno_born (int regno)
93 { 91 {
95 93
96 SET_HARD_REG_BIT (hard_regs_live, regno); 94 SET_HARD_REG_BIT (hard_regs_live, regno);
97 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i) 95 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
98 { 96 {
99 ira_object_t obj = ira_object_id_map[i]; 97 ira_object_t obj = ira_object_id_map[i];
98
100 SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno); 99 SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
101 SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno); 100 SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno);
102 } 101 }
103 } 102 }
104 103
132 static void 131 static void
133 update_allocno_pressure_excess_length (ira_object_t obj) 132 update_allocno_pressure_excess_length (ira_object_t obj)
134 { 133 {
135 ira_allocno_t a = OBJECT_ALLOCNO (obj); 134 ira_allocno_t a = OBJECT_ALLOCNO (obj);
136 int start, i; 135 int start, i;
137 enum reg_class cover_class, cl; 136 enum reg_class aclass, pclass, cl;
138 live_range_t p; 137 live_range_t p;
139 138
140 cover_class = ALLOCNO_COVER_CLASS (a); 139 aclass = ALLOCNO_CLASS (a);
140 pclass = ira_pressure_class_translate[aclass];
141 for (i = 0; 141 for (i = 0;
142 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES; 142 (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
143 i++) 143 i++)
144 { 144 {
145 if (! ira_reg_pressure_class_p[cl])
146 continue;
145 if (high_pressure_start_point[cl] < 0) 147 if (high_pressure_start_point[cl] < 0)
146 continue; 148 continue;
147 p = OBJECT_LIVE_RANGES (obj); 149 p = OBJECT_LIVE_RANGES (obj);
148 ira_assert (p != NULL); 150 ira_assert (p != NULL);
149 start = (high_pressure_start_point[cl] > p->start 151 start = (high_pressure_start_point[cl] > p->start
164 ira_assert (lr != NULL); 166 ira_assert (lr != NULL);
165 lr->finish = curr_point; 167 lr->finish = curr_point;
166 update_allocno_pressure_excess_length (obj); 168 update_allocno_pressure_excess_length (obj);
167 } 169 }
168 170
169 /* The current register pressures for each cover class for the current 171 /* The current register pressures for each pressure class for the current
170 basic block. */ 172 basic block. */
171 static int curr_reg_pressure[N_REG_CLASSES]; 173 static int curr_reg_pressure[N_REG_CLASSES];
172 174
173 /* Record that register pressure for COVER_CLASS increased by N 175 /* Record that register pressure for PCLASS increased by N registers.
174 registers. Update the current register pressure, maximal register 176 Update the current register pressure, maximal register pressure for
175 pressure for the current BB and the start point of the register 177 the current BB and the start point of the register pressure
176 pressure excess. */ 178 excess. */
177 static void 179 static void
178 inc_register_pressure (enum reg_class cover_class, int n) 180 inc_register_pressure (enum reg_class pclass, int n)
179 { 181 {
180 int i; 182 int i;
181 enum reg_class cl; 183 enum reg_class cl;
182 184
183 for (i = 0; 185 for (i = 0;
184 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES; 186 (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
185 i++) 187 i++)
186 { 188 {
189 if (! ira_reg_pressure_class_p[cl])
190 continue;
187 curr_reg_pressure[cl] += n; 191 curr_reg_pressure[cl] += n;
188 if (high_pressure_start_point[cl] < 0 192 if (high_pressure_start_point[cl] < 0
189 && (curr_reg_pressure[cl] > ira_available_class_regs[cl])) 193 && (curr_reg_pressure[cl] > ira_class_hard_regs_num[cl]))
190 high_pressure_start_point[cl] = curr_point; 194 high_pressure_start_point[cl] = curr_point;
191 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl]) 195 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
192 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl]; 196 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
193 } 197 }
194 } 198 }
195 199
196 /* Record that register pressure for COVER_CLASS has decreased by 200 /* Record that register pressure for PCLASS has decreased by NREGS
197 NREGS registers; update current register pressure, start point of 201 registers; update current register pressure, start point of the
198 the register pressure excess, and register pressure excess length 202 register pressure excess, and register pressure excess length for
199 for living allocnos. */ 203 living allocnos. */
200 204
201 static void 205 static void
202 dec_register_pressure (enum reg_class cover_class, int nregs) 206 dec_register_pressure (enum reg_class pclass, int nregs)
203 { 207 {
204 int i; 208 int i;
205 unsigned int j; 209 unsigned int j;
206 enum reg_class cl; 210 enum reg_class cl;
207 bool set_p = false; 211 bool set_p = false;
208 212
209 for (i = 0; 213 for (i = 0;
210 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES; 214 (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
211 i++) 215 i++)
212 { 216 {
217 if (! ira_reg_pressure_class_p[cl])
218 continue;
213 curr_reg_pressure[cl] -= nregs; 219 curr_reg_pressure[cl] -= nregs;
214 ira_assert (curr_reg_pressure[cl] >= 0); 220 ira_assert (curr_reg_pressure[cl] >= 0);
215 if (high_pressure_start_point[cl] >= 0 221 if (high_pressure_start_point[cl] >= 0
216 && curr_reg_pressure[cl] <= ira_available_class_regs[cl]) 222 && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
217 set_p = true; 223 set_p = true;
218 } 224 }
219 if (set_p) 225 if (set_p)
220 { 226 {
221 EXECUTE_IF_SET_IN_SPARSESET (objects_live, j) 227 EXECUTE_IF_SET_IN_SPARSESET (objects_live, j)
222 update_allocno_pressure_excess_length (ira_object_id_map[j]); 228 update_allocno_pressure_excess_length (ira_object_id_map[j]);
223 for (i = 0; 229 for (i = 0;
224 (cl = ira_reg_class_super_classes[cover_class][i]) 230 (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
225 != LIM_REG_CLASSES;
226 i++) 231 i++)
227 if (high_pressure_start_point[cl] >= 0 232 {
228 && curr_reg_pressure[cl] <= ira_available_class_regs[cl]) 233 if (! ira_reg_pressure_class_p[cl])
229 high_pressure_start_point[cl] = -1; 234 continue;
230 } 235 if (high_pressure_start_point[cl] >= 0
236 && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
237 high_pressure_start_point[cl] = -1;
238 }
239 }
240 }
241
242 /* Determine from the objects_live bitmap whether REGNO is currently live,
243 and occupies only one object. Return false if we have no information. */
244 static bool
245 pseudo_regno_single_word_and_live_p (int regno)
246 {
247 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
248 ira_object_t obj;
249
250 if (a == NULL)
251 return false;
252 if (ALLOCNO_NUM_OBJECTS (a) > 1)
253 return false;
254
255 obj = ALLOCNO_OBJECT (a, 0);
256
257 return sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj));
231 } 258 }
232 259
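objects_live and allocnos_processed are sparsesets: constant-time insert, delete and membership over a dense 0..n-1 universe, with iteration touching only the members actually present. A minimal sketch of the classic sparse/dense-array representation, illustrative only since GCC's sparseset.h has its own interface:

/* Illustrative sparse-set sketch (Briggs/Torczon style); hypothetical names,
   not GCC's sparseset.h interface.  */
#include <stdio.h>
#include <stdlib.h>

struct sset { unsigned *sparse, *dense, n; };

static struct sset *
sset_create (unsigned universe)
{
  struct sset *s = malloc (sizeof *s);
  s->sparse = calloc (universe, sizeof *s->sparse);
  s->dense = calloc (universe, sizeof *s->dense);
  s->n = 0;
  return s;
}

static int
sset_member_p (struct sset *s, unsigned e)
{
  return s->sparse[e] < s->n && s->dense[s->sparse[e]] == e;
}

static void
sset_insert (struct sset *s, unsigned e)
{
  if (! sset_member_p (s, e))
    {
      s->dense[s->n] = e;
      s->sparse[e] = s->n++;
    }
}

int
main (void)
{
  struct sset *live = sset_create (100);
  sset_insert (live, 42);
  printf ("42 live? %d   7 live? %d\n",
          sset_member_p (live, 42), sset_member_p (live, 7));
  return 0;
}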
233 /* Mark the pseudo register REGNO as live. Update all information about 260 /* Mark the pseudo register REGNO as live. Update all information about
234 live ranges and register pressure. */ 261 live ranges and register pressure. */
235 static void 262 static void
236 mark_pseudo_regno_live (int regno) 263 mark_pseudo_regno_live (int regno)
237 { 264 {
238 ira_allocno_t a = ira_curr_regno_allocno_map[regno]; 265 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
266 enum reg_class pclass;
239 int i, n, nregs; 267 int i, n, nregs;
240 enum reg_class cl;
241 268
242 if (a == NULL) 269 if (a == NULL)
243 return; 270 return;
244 271
245 /* Invalidate because it is referenced. */ 272 /* Invalidate because it is referenced. */
246 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0; 273 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
247 274
248 n = ALLOCNO_NUM_OBJECTS (a); 275 n = ALLOCNO_NUM_OBJECTS (a);
249 cl = ALLOCNO_COVER_CLASS (a); 276 pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
250 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)]; 277 nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
251 if (n > 1) 278 if (n > 1)
252 { 279 {
253 /* We track every subobject separately. */ 280 /* We track every subobject separately. */
254 gcc_assert (nregs == n); 281 gcc_assert (nregs == n);
255 nregs = 1; 282 nregs = 1;
256 } 283 }
257 284
258 for (i = 0; i < n; i++) 285 for (i = 0; i < n; i++)
259 { 286 {
260 ira_object_t obj = ALLOCNO_OBJECT (a, i); 287 ira_object_t obj = ALLOCNO_OBJECT (a, i);
288
261 if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj))) 289 if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
262 continue; 290 continue;
263 291
264 inc_register_pressure (cl, nregs); 292 inc_register_pressure (pclass, nregs);
265 make_object_born (obj); 293 make_object_born (obj);
266 } 294 }
267 } 295 }
268 296
269 /* Like mark_pseudo_regno_live, but try to only mark one subword of 297 /* Like mark_pseudo_regno_live, but try to only mark one subword of
271 indicates the low part. */ 299 indicates the low part. */
272 static void 300 static void
273 mark_pseudo_regno_subword_live (int regno, int subword) 301 mark_pseudo_regno_subword_live (int regno, int subword)
274 { 302 {
275 ira_allocno_t a = ira_curr_regno_allocno_map[regno]; 303 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
276 int n, nregs; 304 int n;
277 enum reg_class cl; 305 enum reg_class pclass;
278 ira_object_t obj; 306 ira_object_t obj;
279 307
280 if (a == NULL) 308 if (a == NULL)
281 return; 309 return;
282 310
288 { 316 {
289 mark_pseudo_regno_live (regno); 317 mark_pseudo_regno_live (regno);
290 return; 318 return;
291 } 319 }
292 320
293 cl = ALLOCNO_COVER_CLASS (a); 321 pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
294 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)]; 322 gcc_assert
295 gcc_assert (nregs == n); 323 (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
296 obj = ALLOCNO_OBJECT (a, subword); 324 obj = ALLOCNO_OBJECT (a, subword);
297 325
298 if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj))) 326 if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
299 return; 327 return;
300 328
301 inc_register_pressure (cl, nregs); 329 inc_register_pressure (pclass, 1);
302 make_object_born (obj); 330 make_object_born (obj);
303 } 331 }
304 332
305 /* Mark the register REG as live. Store a 1 in hard_regs_live for 333 /* Mark the register REG as live. Store a 1 in hard_regs_live for
306 this register, record how many consecutive hardware registers it 334 this register, record how many consecutive hardware registers it
310 { 338 {
311 int regno = REGNO (reg); 339 int regno = REGNO (reg);
312 340
313 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno)) 341 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
314 { 342 {
315 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)]; 343 int last = END_REGNO (reg);
344 enum reg_class aclass, pclass;
316 345
317 while (regno < last) 346 while (regno < last)
318 { 347 {
319 if (! TEST_HARD_REG_BIT (hard_regs_live, regno) 348 if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
320 && ! TEST_HARD_REG_BIT (eliminable_regset, regno)) 349 && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
321 { 350 {
322 enum reg_class cover_class = ira_hard_regno_cover_class[regno]; 351 aclass = ira_hard_regno_allocno_class[regno];
323 inc_register_pressure (cover_class, 1); 352 pclass = ira_pressure_class_translate[aclass];
353 inc_register_pressure (pclass, 1);
324 make_hard_regno_born (regno); 354 make_hard_regno_born (regno);
325 } 355 }
326 regno++; 356 regno++;
327 } 357 }
328 } 358 }
332 register number; ORIG_REG is the access in the insn, which may be a 362 register number; ORIG_REG is the access in the insn, which may be a
333 subreg. */ 363 subreg. */
334 static void 364 static void
335 mark_pseudo_reg_live (rtx orig_reg, unsigned regno) 365 mark_pseudo_reg_live (rtx orig_reg, unsigned regno)
336 { 366 {
337 if (df_read_modify_subreg_p (orig_reg)) 367 if (read_modify_subreg_p (orig_reg))
338 { 368 {
339 mark_pseudo_regno_subword_live (regno, 369 mark_pseudo_regno_subword_live (regno,
340 subreg_lowpart_p (orig_reg) ? 0 : 1); 370 subreg_lowpart_p (orig_reg) ? 0 : 1);
341 } 371 }
342 else 372 else
373 403
374 /* Invalidate because it is referenced. */ 404 /* Invalidate because it is referenced. */
375 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0; 405 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
376 406
377 n = ALLOCNO_NUM_OBJECTS (a); 407 n = ALLOCNO_NUM_OBJECTS (a);
378 cl = ALLOCNO_COVER_CLASS (a); 408 cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
379 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)]; 409 nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
380 if (n > 1) 410 if (n > 1)
381 { 411 {
382 /* We track every subobject separately. */ 412 /* We track every subobject separately. */
383 gcc_assert (nregs == n); 413 gcc_assert (nregs == n);
384 nregs = 1; 414 nregs = 1;
398 register dies. SUBWORD indicates which; a value of 0 indicates the low part. */ 428 register dies. SUBWORD indicates which; a value of 0 indicates the low part. */
399 static void 429 static void
400 mark_pseudo_regno_subword_dead (int regno, int subword) 430 mark_pseudo_regno_subword_dead (int regno, int subword)
401 { 431 {
402 ira_allocno_t a = ira_curr_regno_allocno_map[regno]; 432 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
403 int n, nregs; 433 int n;
404 enum reg_class cl; 434 enum reg_class cl;
405 ira_object_t obj; 435 ira_object_t obj;
406 436
407 if (a == NULL) 437 if (a == NULL)
408 return; 438 return;
413 n = ALLOCNO_NUM_OBJECTS (a); 443 n = ALLOCNO_NUM_OBJECTS (a);
414 if (n == 1) 444 if (n == 1)
415 /* The allocno as a whole doesn't die in this case. */ 445 /* The allocno as a whole doesn't die in this case. */
416 return; 446 return;
417 447
418 cl = ALLOCNO_COVER_CLASS (a); 448 cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
419 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)]; 449 gcc_assert
420 gcc_assert (nregs == n); 450 (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
421 451
422 obj = ALLOCNO_OBJECT (a, subword); 452 obj = ALLOCNO_OBJECT (a, subword);
423 if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj))) 453 if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
424 return; 454 return;
425 455
434 { 464 {
435 int regno = REGNO (reg); 465 int regno = REGNO (reg);
436 466
437 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno)) 467 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
438 { 468 {
439 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)]; 469 int last = END_REGNO (reg);
470 enum reg_class aclass, pclass;
440 471
441 while (regno < last) 472 while (regno < last)
442 { 473 {
443 if (TEST_HARD_REG_BIT (hard_regs_live, regno)) 474 if (TEST_HARD_REG_BIT (hard_regs_live, regno))
444 { 475 {
445 enum reg_class cover_class = ira_hard_regno_cover_class[regno]; 476 aclass = ira_hard_regno_allocno_class[regno];
446 dec_register_pressure (cover_class, 1); 477 pclass = ira_pressure_class_translate[aclass];
478 dec_register_pressure (pclass, 1);
447 make_hard_regno_dead (regno); 479 make_hard_regno_dead (regno);
448 } 480 }
449 regno++; 481 regno++;
450 } 482 }
451 } 483 }
455 register number; ORIG_REG is the access in the insn, which may be a 487 register number; ORIG_REG is the access in the insn, which may be a
456 subreg. */ 488 subreg. */
457 static void 489 static void
458 mark_pseudo_reg_dead (rtx orig_reg, unsigned regno) 490 mark_pseudo_reg_dead (rtx orig_reg, unsigned regno)
459 { 491 {
460 if (df_read_modify_subreg_p (orig_reg)) 492 if (read_modify_subreg_p (orig_reg))
461 { 493 {
462 mark_pseudo_regno_subword_dead (regno, 494 mark_pseudo_regno_subword_dead (regno,
463 subreg_lowpart_p (orig_reg) ? 0 : 1); 495 subreg_lowpart_p (orig_reg) ? 0 : 1);
464 } 496 }
465 else 497 else
481 reg = SUBREG_REG (reg); 513 reg = SUBREG_REG (reg);
482 514
483 if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL) 515 if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
484 && (GET_CODE (orig_reg) != SUBREG 516 && (GET_CODE (orig_reg) != SUBREG
485 || REGNO (reg) < FIRST_PSEUDO_REGISTER 517 || REGNO (reg) < FIRST_PSEUDO_REGISTER
486 || !df_read_modify_subreg_p (orig_reg))) 518 || !read_modify_subreg_p (orig_reg)))
487 return; 519 return;
488 520
489 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER) 521 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
490 mark_pseudo_reg_dead (orig_reg, REGNO (reg)); 522 mark_pseudo_reg_dead (orig_reg, REGNO (reg));
491 else 523 else
492 mark_hard_reg_dead (reg); 524 mark_hard_reg_dead (reg);
493 } 525 }
494 526
495 /* If REG is a pseudo or a subreg of it, and the class of its allocno 527 /* If REG is a pseudo or a subreg of it, and the class of its allocno
496 intersects CL, make a conflict with pseudo DREG. ORIG_DREG is the 528 intersects CL, make a conflict with pseudo DREG. ORIG_DREG is the
497 rtx actually accessed, it may be indentical to DREG or a subreg of it. 529 rtx actually accessed, it may be identical to DREG or a subreg of it.
498 Advance the current program point before making the conflict if 530 Advance the current program point before making the conflict if
499 ADVANCE_P. Return TRUE if we will need to advance the current 531 ADVANCE_P. Return TRUE if we will need to advance the current
500 program point. */ 532 program point. */
501 static bool 533 static bool
502 make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg, 534 make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg,
510 542
511 if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER) 543 if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
512 return advance_p; 544 return advance_p;
513 545
514 a = ira_curr_regno_allocno_map[REGNO (reg)]; 546 a = ira_curr_regno_allocno_map[REGNO (reg)];
515 if (! reg_classes_intersect_p (cl, ALLOCNO_COVER_CLASS (a))) 547 if (! reg_classes_intersect_p (cl, ALLOCNO_CLASS (a)))
516 return advance_p; 548 return advance_p;
517 549
518 if (advance_p) 550 if (advance_p)
519 curr_point++; 551 curr_point++;
520 552
526 return false; 558 return false;
527 } 559 }
528 560
529 /* Check and make if necessary conflicts for pseudo DREG of class 561 /* Check and make if necessary conflicts for pseudo DREG of class
530 DEF_CL of the current insn with input operand USE of class USE_CL. 562 DEF_CL of the current insn with input operand USE of class USE_CL.
531 ORIG_DREG is the rtx actually accessed, it may be indentical to 563 ORIG_DREG is the rtx actually accessed, it may be identical to
532 DREG or a subreg of it. Advance the current program point before 564 DREG or a subreg of it. Advance the current program point before
533 making the conflict if ADVANCE_P. Return TRUE if we will need to 565 making the conflict if ADVANCE_P. Return TRUE if we will need to
534 advance the current program point. */ 566 advance the current program point. */
535 static bool 567 static bool
536 check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg, 568 check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg,
583 615
584 if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER) 616 if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
585 return; 617 return;
586 618
587 a = ira_curr_regno_allocno_map[REGNO (dreg)]; 619 a = ira_curr_regno_allocno_map[REGNO (dreg)];
588 acl = ALLOCNO_COVER_CLASS (a); 620 acl = ALLOCNO_CLASS (a);
589 if (! reg_classes_intersect_p (acl, def_cl)) 621 if (! reg_classes_intersect_p (acl, def_cl))
590 return; 622 return;
591 623
592 advance_p = true; 624 advance_p = true;
593 625
594 for (use = 0; use < recog_data.n_operands; use++) 626 int n_operands = recog_data.n_operands;
627 const operand_alternative *op_alt = &recog_op_alt[alt * n_operands];
628 for (use = 0; use < n_operands; use++)
595 { 629 {
596 int alt1; 630 int alt1;
597 631
598 if (use == def || recog_data.operand_type[use] == OP_OUT) 632 if (use == def || recog_data.operand_type[use] == OP_OUT)
599 continue; 633 continue;
600 634
601 if (recog_op_alt[use][alt].anything_ok) 635 if (op_alt[use].anything_ok)
602 use_cl = ALL_REGS; 636 use_cl = ALL_REGS;
603 else 637 else
604 use_cl = recog_op_alt[use][alt].cl; 638 use_cl = op_alt[use].cl;
605 639
606 /* If there's any alternative that allows USE to match DEF, do not 640 /* If there's any alternative that allows USE to match DEF, do not
607 record a conflict. If that causes us to create an invalid 641 record a conflict. If that causes us to create an invalid
608 instruction due to the earlyclobber, reload must fix it up. */ 642 instruction due to the earlyclobber, reload must fix it up. */
609 for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++) 643 for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
610 if (recog_op_alt[use][alt1].matches == def 644 {
611 || (use < recog_data.n_operands - 1 645 if (!TEST_BIT (preferred_alternatives, alt1))
612 && recog_data.constraints[use][0] == '%' 646 continue;
613 && recog_op_alt[use + 1][alt1].matches == def) 647 const operand_alternative *op_alt1
614 || (use >= 1 648 = &recog_op_alt[alt1 * n_operands];
615 && recog_data.constraints[use - 1][0] == '%' 649 if (op_alt1[use].matches == def
616 && recog_op_alt[use - 1][alt1].matches == def)) 650 || (use < n_operands - 1
617 break; 651 && recog_data.constraints[use][0] == '%'
652 && op_alt1[use + 1].matches == def)
653 || (use >= 1
654 && recog_data.constraints[use - 1][0] == '%'
655 && op_alt1[use - 1].matches == def))
656 break;
657 }
618 658
619 if (alt1 < recog_data.n_alternatives) 659 if (alt1 < recog_data.n_alternatives)
620 continue; 660 continue;
621 661
622 advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl, 662 advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
623 use, use_cl, advance_p); 663 use, use_cl, advance_p);
624 664
625 if ((use_match = recog_op_alt[use][alt].matches) >= 0) 665 if ((use_match = op_alt[use].matches) >= 0)
626 { 666 {
627 if (use_match == def) 667 if (use_match == def)
628 continue; 668 continue;
629 669
630 if (recog_op_alt[use_match][alt].anything_ok) 670 if (op_alt[use_match].anything_ok)
631 use_cl = ALL_REGS; 671 use_cl = ALL_REGS;
632 else 672 else
633 use_cl = recog_op_alt[use_match][alt].cl; 673 use_cl = op_alt[use_match].cl;
634 advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl, 674 advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
635 use, use_cl, advance_p); 675 use, use_cl, advance_p);
636 } 676 }
637 } 677 }
638 } 678 }
646 { 686 {
647 int alt; 687 int alt;
648 int def, def_match; 688 int def, def_match;
649 enum reg_class def_cl; 689 enum reg_class def_cl;
650 690
651 for (alt = 0; alt < recog_data.n_alternatives; alt++) 691 int n_alternatives = recog_data.n_alternatives;
652 for (def = 0; def < recog_data.n_operands; def++) 692 int n_operands = recog_data.n_operands;
653 { 693 const operand_alternative *op_alt = recog_op_alt;
654 def_cl = NO_REGS; 694 for (alt = 0; alt < n_alternatives; alt++, op_alt += n_operands)
655 if (recog_op_alt[def][alt].earlyclobber) 695 if (TEST_BIT (preferred_alternatives, alt))
656 { 696 for (def = 0; def < n_operands; def++)
657 if (recog_op_alt[def][alt].anything_ok) 697 {
658 def_cl = ALL_REGS; 698 def_cl = NO_REGS;
659 else 699 if (op_alt[def].earlyclobber)
660 def_cl = recog_op_alt[def][alt].cl; 700 {
661 check_and_make_def_conflict (alt, def, def_cl); 701 if (op_alt[def].anything_ok)
662 } 702 def_cl = ALL_REGS;
663 if ((def_match = recog_op_alt[def][alt].matches) >= 0 703 else
664 && (recog_op_alt[def_match][alt].earlyclobber 704 def_cl = op_alt[def].cl;
665 || recog_op_alt[def][alt].earlyclobber)) 705 check_and_make_def_conflict (alt, def, def_cl);
666 { 706 }
667 if (recog_op_alt[def_match][alt].anything_ok) 707 if ((def_match = op_alt[def].matches) >= 0
668 def_cl = ALL_REGS; 708 && (op_alt[def_match].earlyclobber
669 else 709 || op_alt[def].earlyclobber))
670 def_cl = recog_op_alt[def_match][alt].cl; 710 {
671 check_and_make_def_conflict (alt, def, def_cl); 711 if (op_alt[def_match].anything_ok)
672 } 712 def_cl = ALL_REGS;
673 } 713 else
714 def_cl = op_alt[def_match].cl;
715 check_and_make_def_conflict (alt, def, def_cl);
716 }
717 }
674 } 718 }
675 719
676 /* Mark early clobber hard registers of the current INSN as live (if 720 /* Mark early clobber hard registers of the current INSN as live (if
677 LIVE_P) or dead. Return true if there are such registers. */ 721 LIVE_P) or dead. Return true if there are such registers. */
678 static bool 722 static bool
679 mark_hard_reg_early_clobbers (rtx insn, bool live_p) 723 mark_hard_reg_early_clobbers (rtx_insn *insn, bool live_p)
680 { 724 {
681 df_ref *def_rec; 725 df_ref def;
682 bool set_p = false; 726 bool set_p = false;
683 727
684 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++) 728 FOR_EACH_INSN_DEF (def, insn)
685 if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER)) 729 if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
686 { 730 {
687 rtx dreg = DF_REF_REG (*def_rec); 731 rtx dreg = DF_REF_REG (def);
688 732
689 if (GET_CODE (dreg) == SUBREG) 733 if (GET_CODE (dreg) == SUBREG)
690 dreg = SUBREG_REG (dreg); 734 dreg = SUBREG_REG (dreg);
691 if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER) 735 if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
692 continue; 736 continue;
693 737
694 /* Hard register clobbers are believed to be early clobber 738 /* Hard register clobbers are believed to be early clobber
695 because there is no way to say that non-operand hard 739 because there is no way to say that non-operand hard
696 register clobbers are not early ones. */ 740 register clobbers are not early ones. */
697 if (live_p) 741 if (live_p)
698 mark_ref_live (*def_rec); 742 mark_ref_live (def);
699 else 743 else
700 mark_ref_dead (*def_rec); 744 mark_ref_dead (def);
701 set_p = true; 745 set_p = true;
702 } 746 }
703 747
704 return set_p; 748 return set_p;
705 } 749 }
708 it is so, the function returns the class of the hard register. 752 it is so, the function returns the class of the hard register.
709 Otherwise it returns NO_REGS. */ 753 Otherwise it returns NO_REGS. */
710 static enum reg_class 754 static enum reg_class
711 single_reg_class (const char *constraints, rtx op, rtx equiv_const) 755 single_reg_class (const char *constraints, rtx op, rtx equiv_const)
712 { 756 {
713 int ignore_p; 757 int c;
714 enum reg_class cl, next_cl; 758 enum reg_class cl, next_cl;
715 int c; 759 enum constraint_num cn;
716 760
717 cl = NO_REGS; 761 cl = NO_REGS;
718 for (ignore_p = false; 762 alternative_mask preferred = preferred_alternatives;
719 (c = *constraints); 763 for (; (c = *constraints); constraints += CONSTRAINT_LEN (c, constraints))
720 constraints += CONSTRAINT_LEN (c, constraints))
721 if (c == '#') 764 if (c == '#')
722 ignore_p = true; 765 preferred &= ~ALTERNATIVE_BIT (0);
723 else if (c == ',') 766 else if (c == ',')
724 ignore_p = false; 767 preferred >>= 1;
725 else if (! ignore_p) 768 else if (preferred & 1)
726 switch (c) 769 switch (c)
727 { 770 {
728 case ' ': 771 case 'g':
729 case '\t': 772 return NO_REGS;
730 case '=': 773
731 case '+': 774 default:
732 case '*': 775 /* ??? Is this the best way to handle memory constraints? */
733 case '&': 776 cn = lookup_constraint (constraints);
734 case '%': 777 if (insn_extra_memory_constraint (cn)
735 case '!': 778 || insn_extra_special_memory_constraint (cn)
736 case '?': 779 || insn_extra_address_constraint (cn))
737 break;
738 case 'i':
739 if (CONSTANT_P (op)
740 || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
741 return NO_REGS; 780 return NO_REGS;
742 break; 781 if (constraint_satisfied_p (op, cn)
743
744 case 'n':
745 if (CONST_INT_P (op)
746 || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
747 || (equiv_const != NULL_RTX
748 && (CONST_INT_P (equiv_const)
749 || (GET_CODE (equiv_const) == CONST_DOUBLE
750 && GET_MODE (equiv_const) == VOIDmode))))
751 return NO_REGS;
752 break;
753
754 case 's':
755 if ((CONSTANT_P (op) && !CONST_INT_P (op)
756 && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
757 || (equiv_const != NULL_RTX 782 || (equiv_const != NULL_RTX
758 && CONSTANT_P (equiv_const) 783 && CONSTANT_P (equiv_const)
759 && !CONST_INT_P (equiv_const) 784 && constraint_satisfied_p (equiv_const, cn)))
760 && (GET_CODE (equiv_const) != CONST_DOUBLE
761 || GET_MODE (equiv_const) != VOIDmode)))
762 return NO_REGS; 785 return NO_REGS;
763 break; 786 next_cl = reg_class_for_constraint (cn);
764 787 if (next_cl == NO_REGS)
765 case 'I': 788 break;
766 case 'J': 789 if (cl == NO_REGS
767 case 'K': 790 ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
768 case 'L': 791 : (ira_class_singleton[cl][GET_MODE (op)]
769 case 'M': 792 != ira_class_singleton[next_cl][GET_MODE (op)]))
770 case 'N':
771 case 'O':
772 case 'P':
773 if ((CONST_INT_P (op)
774 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
775 || (equiv_const != NULL_RTX
776 && CONST_INT_P (equiv_const)
777 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
778 c, constraints)))
779 return NO_REGS;
780 break;
781
782 case 'E':
783 case 'F':
784 if (GET_CODE (op) == CONST_DOUBLE
785 || (GET_CODE (op) == CONST_VECTOR
786 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
787 || (equiv_const != NULL_RTX
788 && (GET_CODE (equiv_const) == CONST_DOUBLE
789 || (GET_CODE (equiv_const) == CONST_VECTOR
790 && (GET_MODE_CLASS (GET_MODE (equiv_const))
791 == MODE_VECTOR_FLOAT)))))
792 return NO_REGS;
793 break;
794
795 case 'G':
796 case 'H':
797 if ((GET_CODE (op) == CONST_DOUBLE
798 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
799 || (equiv_const != NULL_RTX
800 && GET_CODE (equiv_const) == CONST_DOUBLE
801 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
802 c, constraints)))
803 return NO_REGS;
804 /* ??? what about memory */
805 case 'r':
806 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
807 case 'h': case 'j': case 'k': case 'l':
808 case 'q': case 't': case 'u':
809 case 'v': case 'w': case 'x': case 'y': case 'z':
810 case 'A': case 'B': case 'C': case 'D':
811 case 'Q': case 'R': case 'S': case 'T': case 'U':
812 case 'W': case 'Y': case 'Z':
813 next_cl = (c == 'r'
814 ? GENERAL_REGS
815 : REG_CLASS_FROM_CONSTRAINT (c, constraints));
816 if ((cl != NO_REGS && next_cl != cl)
817 || (ira_available_class_regs[next_cl]
818 > ira_reg_class_nregs[next_cl][GET_MODE (op)]))
819 return NO_REGS; 793 return NO_REGS;
820 cl = next_cl; 794 cl = next_cl;
821 break; 795 break;
822 796
823 case '0': case '1': case '2': case '3': case '4': 797 case '0': case '1': case '2': case '3': case '4':
824 case '5': case '6': case '7': case '8': case '9': 798 case '5': case '6': case '7': case '8': case '9':
825 next_cl 799 next_cl
826 = single_reg_class (recog_data.constraints[c - '0'], 800 = single_reg_class (recog_data.constraints[c - '0'],
827 recog_data.operand[c - '0'], NULL_RTX); 801 recog_data.operand[c - '0'], NULL_RTX);
828 if ((cl != NO_REGS && next_cl != cl) 802 if (cl == NO_REGS
829 || next_cl == NO_REGS 803 ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
830 || (ira_available_class_regs[next_cl] 804 : (ira_class_singleton[cl][GET_MODE (op)]
831 > ira_reg_class_nregs[next_cl][GET_MODE (op)])) 805 != ira_class_singleton[next_cl][GET_MODE (op)]))
832 return NO_REGS; 806 return NO_REGS;
833 cl = next_cl; 807 cl = next_cl;
834 break; 808 break;
835
836 default:
837 return NO_REGS;
838 } 809 }
839 return cl; 810 return cl;
840 } 811 }
841 812
842 /* The function checks that operand OP_NUM of the current insn can use 813 /* The function checks that operand OP_NUM of the current insn can use
853 824
854 /* The function sets up hard register set *SET to hard registers which 825 /* The function sets up hard register set *SET to hard registers which
855 might be used by insn reloads because the constraints are too 826 might be used by insn reloads because the constraints are too
856 strict. */ 827 strict. */
857 void 828 void
858 ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set) 829 ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set,
830 alternative_mask preferred)
859 { 831 {
860 int i, c, regno = 0; 832 int i, c, regno = 0;
861 bool ignore_p;
862 enum reg_class cl; 833 enum reg_class cl;
863 rtx op; 834 rtx op;
864 enum machine_mode mode; 835 machine_mode mode;
865 836
866 CLEAR_HARD_REG_SET (*set); 837 CLEAR_HARD_REG_SET (*set);
867 for (i = 0; i < recog_data.n_operands; i++) 838 for (i = 0; i < recog_data.n_operands; i++)
868 { 839 {
869 op = recog_data.operand[i]; 840 op = recog_data.operand[i];
877 const char *p = recog_data.constraints[i]; 848 const char *p = recog_data.constraints[i];
878 849
879 mode = (GET_CODE (op) == SCRATCH 850 mode = (GET_CODE (op) == SCRATCH
880 ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno)); 851 ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
881 cl = NO_REGS; 852 cl = NO_REGS;
882 for (ignore_p = false; (c = *p); p += CONSTRAINT_LEN (c, p)) 853 for (; (c = *p); p += CONSTRAINT_LEN (c, p))
883 if (c == '#') 854 if (c == '#')
884 ignore_p = true; 855 preferred &= ~ALTERNATIVE_BIT (0);
885 else if (c == ',') 856 else if (c == ',')
886 ignore_p = false; 857 preferred >>= 1;
887 else if (! ignore_p) 858 else if (preferred & 1)
888 switch (c) 859 {
889 { 860 cl = reg_class_for_constraint (lookup_constraint (p));
890 case 'r': 861 if (cl != NO_REGS)
891 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': 862 {
892 case 'h': case 'j': case 'k': case 'l': 863 /* There is no register pressure problem if all of the
893 case 'q': case 't': case 'u': 864 regs in this class are fixed. */
894 case 'v': case 'w': case 'x': case 'y': case 'z': 865 int regno = ira_class_singleton[cl][mode];
895 case 'A': case 'B': case 'C': case 'D': 866 if (regno >= 0)
896 case 'Q': case 'R': case 'S': case 'T': case 'U': 867 add_to_hard_reg_set (set, mode, regno);
897 case 'W': case 'Y': case 'Z': 868 }
898 cl = (c == 'r' 869 }
899 ? GENERAL_REGS
900 : REG_CLASS_FROM_CONSTRAINT (c, p));
901 if (cl != NO_REGS
902 /* There is no register pressure problem if all of the
903 regs in this class are fixed. */
904 && ira_available_class_regs[cl] != 0
905 && (ira_available_class_regs[cl]
906 <= ira_reg_class_nregs[cl][mode]))
907 IOR_HARD_REG_SET (*set, reg_class_contents[cl]);
908 break;
909 }
910 } 870 }
911 } 871 }
912 } 872 }
913 /* Processes input operands, if IN_P, or output operands otherwise of 873 /* Processes input operands, if IN_P, or output operands otherwise of
914 the current insn with FREQ to find allocno which can use only one 874 the current insn with FREQ to find allocno which can use only one
942 operand = SUBREG_REG (operand); 902 operand = SUBREG_REG (operand);
943 903
944 if (REG_P (operand) 904 if (REG_P (operand)
945 && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER) 905 && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
946 { 906 {
947 enum reg_class cover_class; 907 enum reg_class aclass;
948 908
949 operand_a = ira_curr_regno_allocno_map[regno]; 909 operand_a = ira_curr_regno_allocno_map[regno];
950 cover_class = ALLOCNO_COVER_CLASS (operand_a); 910 aclass = ALLOCNO_CLASS (operand_a);
951 if (ira_class_subset_p[cl][cover_class] 911 if (ira_class_subset_p[cl][aclass])
952 && ira_class_hard_regs_num[cl] != 0)
953 { 912 {
954 /* View the desired allocation of OPERAND as: 913 /* View the desired allocation of OPERAND as:
955 914
956 (REG:YMODE YREGNO), 915 (REG:YMODE YREGNO),
957 916
958 a simplification of: 917 a simplification of:
959 918
960 (subreg:YMODE (reg:XMODE XREGNO) OFFSET). */ 919 (subreg:YMODE (reg:XMODE XREGNO) OFFSET). */
961 enum machine_mode ymode, xmode; 920 machine_mode ymode, xmode;
962 int xregno, yregno; 921 int xregno, yregno;
963 HOST_WIDE_INT offset; 922 HOST_WIDE_INT offset;
964 923
965 xmode = recog_data.operand_mode[i]; 924 xmode = recog_data.operand_mode[i];
966 xregno = ira_class_hard_regs[cl][0]; 925 xregno = ira_class_singleton[cl][xmode];
926 gcc_assert (xregno >= 0);
967 ymode = ALLOCNO_MODE (operand_a); 927 ymode = ALLOCNO_MODE (operand_a);
968 offset = subreg_lowpart_offset (ymode, xmode); 928 offset = subreg_lowpart_offset (ymode, xmode);
969 yregno = simplify_subreg_regno (xregno, xmode, offset, ymode); 929 yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
970 if (yregno >= 0 930 if (yregno >= 0
971 && ira_class_hard_reg_index[cover_class][yregno] >= 0) 931 && ira_class_hard_reg_index[aclass][yregno] >= 0)
972 { 932 {
973 int cost; 933 int cost;
974 934
975 ira_allocate_and_set_costs 935 ira_allocate_and_set_costs
976 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), 936 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
977 cover_class, 0); 937 aclass, 0);
978 cost 938 ira_init_register_move_cost_if_necessary (xmode);
979 = (freq 939 cost = freq * (in_p
980 * (in_p 940 ? ira_register_move_cost[xmode][aclass][cl]
981 ? ira_get_register_move_cost (xmode, cover_class, cl) 941 : ira_register_move_cost[xmode][cl][aclass]);
982 : ira_get_register_move_cost (xmode, cl,
983 cover_class)));
984 ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a) 942 ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
985 [ira_class_hard_reg_index[cover_class][yregno]] -= cost; 943 [ira_class_hard_reg_index[aclass][yregno]] -= cost;
986 } 944 }
987 } 945 }
988 } 946 }
989 947
990 EXECUTE_IF_SET_IN_SPARSESET (objects_live, px) 948 EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
1003 } 961 }
1004 } 962 }
1005 } 963 }
1006 } 964 }
1007 965
1008 /* Return true when one of the predecessor edges of BB is marked with 966 /* Look through the CALL_INSN_FUNCTION_USAGE of a call insn INSN, and see if
1009 EDGE_ABNORMAL_CALL or EDGE_EH. */ 967 we find a SET rtx that we can use to deduce that a register can be cheaply
1010 static bool 968 caller-saved. Return such a register, or NULL_RTX if none is found. */
1011 bb_has_abnormal_call_pred (basic_block bb) 969 static rtx
1012 { 970 find_call_crossed_cheap_reg (rtx_insn *insn)
1013 edge e; 971 {
1014 edge_iterator ei; 972 rtx cheap_reg = NULL_RTX;
1015 973 rtx exp = CALL_INSN_FUNCTION_USAGE (insn);
1016 FOR_EACH_EDGE (e, ei, bb->preds) 974
1017 { 975 while (exp != NULL)
1018 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)) 976 {
1019 return true; 977 rtx x = XEXP (exp, 0);
1020 } 978 if (GET_CODE (x) == SET)
1021 return false; 979 {
1022 } 980 exp = x;
981 break;
982 }
983 exp = XEXP (exp, 1);
984 }
985 if (exp != NULL)
986 {
987 basic_block bb = BLOCK_FOR_INSN (insn);
988 rtx reg = SET_SRC (exp);
989 rtx_insn *prev = PREV_INSN (insn);
990 while (prev && !(INSN_P (prev)
991 && BLOCK_FOR_INSN (prev) != bb))
992 {
993 if (NONDEBUG_INSN_P (prev))
994 {
995 rtx set = single_set (prev);
996
997 if (set && rtx_equal_p (SET_DEST (set), reg))
998 {
999 rtx src = SET_SRC (set);
1000 if (!REG_P (src) || HARD_REGISTER_P (src)
1001 || !pseudo_regno_single_word_and_live_p (REGNO (src)))
1002 break;
1003 if (!modified_between_p (src, prev, insn))
1004 cheap_reg = src;
1005 break;
1006 }
1007 if (set && rtx_equal_p (SET_SRC (set), reg))
1008 {
1009 rtx dest = SET_DEST (set);
1010 if (!REG_P (dest) || HARD_REGISTER_P (dest)
1011 || !pseudo_regno_single_word_and_live_p (REGNO (dest)))
1012 break;
1013 if (!modified_between_p (dest, prev, insn))
1014 cheap_reg = dest;
1015 break;
1016 }
1017
1018 if (reg_set_p (reg, prev))
1019 break;
1020 }
1021 prev = PREV_INSN (prev);
1022 }
1023 }
1024 return cheap_reg;
1025 }
1023 1026
1024 /* Process insns of the basic block given by its LOOP_TREE_NODE to 1027 /* Process insns of the basic block given by its LOOP_TREE_NODE to
1025 update allocno live ranges, allocno hard register conflicts, 1028 update allocno live ranges, allocno hard register conflicts,
1026 intersected calls, and register pressure info for allocnos for the 1029 intersected calls, and register pressure info for allocnos for the
1027 basic block for and regions containing the basic block. */ 1030 basic block for and regions containing the basic block. */
1029 process_bb_node_lives (ira_loop_tree_node_t loop_tree_node) 1032 process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
1030 { 1033 {
1031 int i, freq; 1034 int i, freq;
1032 unsigned int j; 1035 unsigned int j;
1033 basic_block bb; 1036 basic_block bb;
1034 rtx insn; 1037 rtx_insn *insn;
1035 bitmap_iterator bi; 1038 bitmap_iterator bi;
1036 bitmap reg_live_out; 1039 bitmap reg_live_out;
1037 unsigned int px; 1040 unsigned int px;
1038 bool set_p; 1041 bool set_p;
1039 1042
1040 bb = loop_tree_node->bb; 1043 bb = loop_tree_node->bb;
1041 if (bb != NULL) 1044 if (bb != NULL)
1042 { 1045 {
1043 for (i = 0; i < ira_reg_class_cover_size; i++) 1046 for (i = 0; i < ira_pressure_classes_num; i++)
1044 { 1047 {
1045 curr_reg_pressure[ira_reg_class_cover[i]] = 0; 1048 curr_reg_pressure[ira_pressure_classes[i]] = 0;
1046 high_pressure_start_point[ira_reg_class_cover[i]] = -1; 1049 high_pressure_start_point[ira_pressure_classes[i]] = -1;
1047 } 1050 }
1048 curr_bb_node = loop_tree_node; 1051 curr_bb_node = loop_tree_node;
1049 reg_live_out = DF_LR_OUT (bb); 1052 reg_live_out = df_get_live_out (bb);
1050 sparseset_clear (objects_live); 1053 sparseset_clear (objects_live);
1051 REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out); 1054 REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
1052 AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset); 1055 AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
1053 AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs); 1056 AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
1054 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1057 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1055 if (TEST_HARD_REG_BIT (hard_regs_live, i)) 1058 if (TEST_HARD_REG_BIT (hard_regs_live, i))
1056 { 1059 {
1057 enum reg_class cover_class, cl; 1060 enum reg_class aclass, pclass, cl;
1058 1061
1059 cover_class = ira_class_translate[REGNO_REG_CLASS (i)]; 1062 aclass = ira_allocno_class_translate[REGNO_REG_CLASS (i)];
1063 pclass = ira_pressure_class_translate[aclass];
1060 for (j = 0; 1064 for (j = 0;
1061 (cl = ira_reg_class_super_classes[cover_class][j]) 1065 (cl = ira_reg_class_super_classes[pclass][j])
1062 != LIM_REG_CLASSES; 1066 != LIM_REG_CLASSES;
1063 j++) 1067 j++)
1064 { 1068 {
1069 if (! ira_reg_pressure_class_p[cl])
1070 continue;
1065 curr_reg_pressure[cl]++; 1071 curr_reg_pressure[cl]++;
1066 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl]) 1072 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
1067 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl]; 1073 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
1068 ira_assert (curr_reg_pressure[cl] 1074 ira_assert (curr_reg_pressure[cl]
1069 <= ira_available_class_regs[cl]); 1075 <= ira_class_hard_regs_num[cl]);
1070 } 1076 }
1071 } 1077 }
1072 EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi) 1078 EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
1073 mark_pseudo_regno_live (j); 1079 mark_pseudo_regno_live (j);
1074 1080
1088 set, FOO will remain live until the beginning of the block. 1094 set, FOO will remain live until the beginning of the block.
1089 Likewise if FOO is not set at all. This is unnecessarily 1095 Likewise if FOO is not set at all. This is unnecessarily
1090 pessimistic, but it probably doesn't matter much in practice. */ 1096 pessimistic, but it probably doesn't matter much in practice. */
1091 FOR_BB_INSNS_REVERSE (bb, insn) 1097 FOR_BB_INSNS_REVERSE (bb, insn)
1092 { 1098 {
1093 df_ref *def_rec, *use_rec; 1099 ira_allocno_t a;
1100 df_ref def, use;
1094 bool call_p; 1101 bool call_p;
1095 1102
1096 if (!NONDEBUG_INSN_P (insn)) 1103 if (!NONDEBUG_INSN_P (insn))
1097 continue; 1104 continue;
1098 1105
1099 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL) 1106 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1100 fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n", 1107 fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n",
1101 INSN_UID (insn), loop_tree_node->parent->loop->num, 1108 INSN_UID (insn), loop_tree_node->parent->loop_num,
1102 curr_point); 1109 curr_point);
1110
1111 call_p = CALL_P (insn);
1112 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
1113 int regno;
1114 bool clear_pic_use_conflict_p = false;
1115 /* Processing insn usage in call insn can create conflict
1116 with pic pseudo and pic hard reg and that is wrong.
1117 Check this situation and fix it at the end of the insn
1118 processing. */
1119 if (call_p && pic_offset_table_rtx != NULL_RTX
1120 && (regno = REGNO (pic_offset_table_rtx)) >= FIRST_PSEUDO_REGISTER
1121 && (a = ira_curr_regno_allocno_map[regno]) != NULL)
1122 clear_pic_use_conflict_p
1123 = (find_regno_fusage (insn, USE, REAL_PIC_OFFSET_TABLE_REGNUM)
1124 && ! TEST_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS
1125 (ALLOCNO_OBJECT (a, 0)),
1126 REAL_PIC_OFFSET_TABLE_REGNUM));
1127 #endif
1103 1128
1104 /* Mark each defined value as live. We need to do this for 1129 /* Mark each defined value as live. We need to do this for
1105 unused values because they still conflict with quantities 1130 unused values because they still conflict with quantities
1106 that are live at the time of the definition. 1131 that are live at the time of the definition.
1107 1132
1108 Ignore DF_REF_MAY_CLOBBERs on a call instruction. Such 1133 Ignore DF_REF_MAY_CLOBBERs on a call instruction. Such
1109 references represent the effect of the called function 1134 references represent the effect of the called function
1110 on a call-clobbered register. Marking the register as 1135 on a call-clobbered register. Marking the register as
1111 live would stop us from allocating it to a call-crossing 1136 live would stop us from allocating it to a call-crossing
1112 allocno. */ 1137 allocno. */
1113 call_p = CALL_P (insn); 1138 FOR_EACH_INSN_DEF (def, insn)
1114 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++) 1139 if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1115 if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER)) 1140 mark_ref_live (def);
1116 mark_ref_live (*def_rec);
1117 1141
1118 /* If INSN has multiple outputs, then any value used in one 1142 /* If INSN has multiple outputs, then any value used in one
1119 of the outputs conflicts with the other outputs. Model this 1143 of the outputs conflicts with the other outputs. Model this
1120 by making the used value live during the output phase. 1144 by making the used value live during the output phase.
1121 1145
1125 occur. Consider if ALLOCNO appears in the address of an 1149 occur. Consider if ALLOCNO appears in the address of an
1126 output and we reload the output. If we allocate ALLOCNO 1150 output and we reload the output. If we allocate ALLOCNO
1127 to the same hard register as an unused output we could 1151 to the same hard register as an unused output we could
1128 set the hard register before the output reload insn. */ 1152 set the hard register before the output reload insn. */
1129 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) 1153 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1130 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++) 1154 FOR_EACH_INSN_USE (use, insn)
1131 { 1155 {
1132 int i; 1156 int i;
1133 rtx reg; 1157 rtx reg;
1134 1158
1135 reg = DF_REF_REG (*use_rec); 1159 reg = DF_REF_REG (use);
1136 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) 1160 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
1137 { 1161 {
1138 rtx set; 1162 rtx set;
1139 1163
1140 set = XVECEXP (PATTERN (insn), 0, i); 1164 set = XVECEXP (PATTERN (insn), 0, i);
1141 if (GET_CODE (set) == SET 1165 if (GET_CODE (set) == SET
1142 && reg_overlap_mentioned_p (reg, SET_DEST (set))) 1166 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
1143 { 1167 {
1144 /* After the previous loop, this is a no-op if 1168 /* After the previous loop, this is a no-op if
1145 REG is contained within SET_DEST (SET). */ 1169 REG is contained within SET_DEST (SET). */
1146 mark_ref_live (*use_rec); 1170 mark_ref_live (use);
1147 break; 1171 break;
1148 } 1172 }
1149 } 1173 }
1150 } 1174 }
1151 1175
1152 extract_insn (insn); 1176 extract_insn (insn);
1153 preprocess_constraints (); 1177 preferred_alternatives = get_preferred_alternatives (insn);
1178 preprocess_constraints (insn);
1154 process_single_reg_class_operands (false, freq); 1179 process_single_reg_class_operands (false, freq);
1155 1180
1156 /* See which defined values die here. */ 1181 /* See which defined values die here. */
1157 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++) 1182 FOR_EACH_INSN_DEF (def, insn)
1158 if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER)) 1183 if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1159 mark_ref_dead (*def_rec); 1184 mark_ref_dead (def);
1160 1185
1161 if (call_p) 1186 if (call_p)
1162 { 1187 {
1188 /* Try to find a SET in the CALL_INSN_FUNCTION_USAGE, and from
1189 there, try to find a pseudo that is live across the call but
1190 can be cheaply reconstructed from the return value. */
1191 rtx cheap_reg = find_call_crossed_cheap_reg (insn);
1192 if (cheap_reg != NULL_RTX)
1193 add_reg_note (insn, REG_RETURNED, cheap_reg);
1194
1163 last_call_num++; 1195 last_call_num++;
1164 sparseset_clear (allocnos_processed); 1196 sparseset_clear (allocnos_processed);
1165 /* The current set of live allocnos are live across the call. */ 1197 /* The current set of live allocnos are live across the call. */
1166 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i) 1198 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1167 { 1199 {
1168 ira_object_t obj = ira_object_id_map[i]; 1200 ira_object_t obj = ira_object_id_map[i];
1169 ira_allocno_t a = OBJECT_ALLOCNO (obj); 1201 a = OBJECT_ALLOCNO (obj);
1170 int num = ALLOCNO_NUM (a); 1202 int num = ALLOCNO_NUM (a);
1203 HARD_REG_SET this_call_used_reg_set;
1204
1205 get_call_reg_set_usage (insn, &this_call_used_reg_set,
1206 call_used_reg_set);
1171 1207
1172 /* Don't allocate allocnos that cross setjmps or any 1208 /* Don't allocate allocnos that cross setjmps or any
1173 call, if this function receives a nonlocal 1209 call, if this function receives a nonlocal
1174 goto. */ 1210 goto. */
1175 if (cfun->has_nonlocal_label 1211 if (cfun->has_nonlocal_label
1180 SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj)); 1216 SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
1181 } 1217 }
1182 if (can_throw_internal (insn)) 1218 if (can_throw_internal (insn))
1183 { 1219 {
1184 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), 1220 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1185 call_used_reg_set); 1221 this_call_used_reg_set);
1186 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), 1222 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1187 call_used_reg_set); 1223 this_call_used_reg_set);
1188 } 1224 }
1189 1225
1190 if (sparseset_bit_p (allocnos_processed, num)) 1226 if (sparseset_bit_p (allocnos_processed, num))
1191 continue; 1227 continue;
1192 sparseset_set_bit (allocnos_processed, num); 1228 sparseset_set_bit (allocnos_processed, num);
1193 1229
1194 if (allocno_saved_at_call[num] != last_call_num) 1230 if (allocno_saved_at_call[num] != last_call_num)
1195 /* Here we are mimicking caller-save.c behaviour 1231 /* Here we are mimicking caller-save.c behavior
1196 which does not save hard register at a call if 1232 which does not save hard register at a call if
1197 it was saved on previous call in the same basic 1233 it was saved on previous call in the same basic
1198 block and the hard register was not mentioned 1234 block and the hard register was not mentioned
1199 between the two calls. */ 1235 between the two calls. */
1200 ALLOCNO_CALL_FREQ (a) += freq; 1236 ALLOCNO_CALL_FREQ (a) += freq;
1201 /* Mark it as saved at the next call. */ 1237 /* Mark it as saved at the next call. */
1202 allocno_saved_at_call[num] = last_call_num + 1; 1238 allocno_saved_at_call[num] = last_call_num + 1;
1203 ALLOCNO_CALLS_CROSSED_NUM (a)++; 1239 ALLOCNO_CALLS_CROSSED_NUM (a)++;
1240 IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a),
1241 this_call_used_reg_set);
1242 if (cheap_reg != NULL_RTX
1243 && ALLOCNO_REGNO (a) == (int) REGNO (cheap_reg))
1244 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)++;
1204 } 1245 }
1205 } 1246 }
1206 1247
1207 make_early_clobber_and_input_conflicts (); 1248 make_early_clobber_and_input_conflicts ();
1208 1249
1209 curr_point++; 1250 curr_point++;
1210 1251
1211 /* Mark each used value as live. */ 1252 /* Mark each used value as live. */
1212 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++) 1253 FOR_EACH_INSN_USE (use, insn)
1213 mark_ref_live (*use_rec); 1254 mark_ref_live (use);
1214 1255
1215 process_single_reg_class_operands (true, freq); 1256 process_single_reg_class_operands (true, freq);
1216 1257
1217 set_p = mark_hard_reg_early_clobbers (insn, true); 1258 set_p = mark_hard_reg_early_clobbers (insn, true);
1218 1259
1221 mark_hard_reg_early_clobbers (insn, false); 1262 mark_hard_reg_early_clobbers (insn, false);
1222 1263
1223 /* Mark each hard reg as live again. For example, a 1264 /* Mark each hard reg as live again. For example, a
1224 hard register can be in a clobber and in an insn 1265 hard register can be in a clobber and in an insn
1225 input. */ 1266 input. */
1226 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++) 1267 FOR_EACH_INSN_USE (use, insn)
1227 { 1268 {
1228 rtx ureg = DF_REF_REG (*use_rec); 1269 rtx ureg = DF_REF_REG (use);
1229 1270
1230 if (GET_CODE (ureg) == SUBREG) 1271 if (GET_CODE (ureg) == SUBREG)
1231 ureg = SUBREG_REG (ureg); 1272 ureg = SUBREG_REG (ureg);
1232 if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER) 1273 if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
1233 continue; 1274 continue;
1234 1275
1235 mark_ref_live (*use_rec); 1276 mark_ref_live (use);
1236 } 1277 }
1237 } 1278 }
1238 1279
1280 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
1281 if (clear_pic_use_conflict_p)
1282 {
1283 regno = REGNO (pic_offset_table_rtx);
1284 a = ira_curr_regno_allocno_map[regno];
1285 CLEAR_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (ALLOCNO_OBJECT (a, 0)),
1286 REAL_PIC_OFFSET_TABLE_REGNUM);
1287 CLEAR_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS
1288 (ALLOCNO_OBJECT (a, 0)),
1289 REAL_PIC_OFFSET_TABLE_REGNUM);
1290 }
1291 #endif
1239 curr_point++; 1292 curr_point++;
1240 } 1293 }
1241 1294
1242 #ifdef EH_RETURN_DATA_REGNO
1243 if (bb_has_eh_pred (bb)) 1295 if (bb_has_eh_pred (bb))
1244 for (j = 0; ; ++j) 1296 for (j = 0; ; ++j)
1245 { 1297 {
1246 unsigned int regno = EH_RETURN_DATA_REGNO (j); 1298 unsigned int regno = EH_RETURN_DATA_REGNO (j);
1247 if (regno == INVALID_REGNUM) 1299 if (regno == INVALID_REGNUM)
1248 break; 1300 break;
1249 make_hard_regno_born (regno); 1301 make_hard_regno_born (regno);
1250 } 1302 }
1251 #endif
1252 1303
1253 /* Allocnos can't go in stack regs at the start of a basic block 1304 /* Allocnos can't go in stack regs at the start of a basic block
1254 that is reached by an abnormal edge. Likewise for call 1305 that is reached by an abnormal edge. Likewise for call
1255 clobbered regs, because caller-save, fixup_abnormal_edges and 1306 clobbered regs, because caller-save, fixup_abnormal_edges and
1256 possibly the table driven EH machinery are not quite ready to 1307 possibly the table driven EH machinery are not quite ready to
1259 { 1310 {
1260 #ifdef STACK_REGS 1311 #ifdef STACK_REGS
1261 EXECUTE_IF_SET_IN_SPARSESET (objects_live, px) 1312 EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
1262 { 1313 {
1263 ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]); 1314 ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]);
1315
1264 ALLOCNO_NO_STACK_REG_P (a) = true; 1316 ALLOCNO_NO_STACK_REG_P (a) = true;
1265 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true; 1317 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true;
1266 } 1318 }
1267 for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++) 1319 for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
1268 make_hard_regno_born (px); 1320 make_hard_regno_born (px);
1269 #endif 1321 #endif
1270 /* No need to record conflicts for call clobbered regs if we 1322 /* No need to record conflicts for call clobbered regs if we
1271 have nonlocal labels around, as we don't ever try to 1323 have nonlocal labels around, as we don't ever try to
1272 allocate such regs in this case. */ 1324 allocate such regs in this case. */
1273 if (!cfun->has_nonlocal_label && bb_has_abnormal_call_pred (bb)) 1325 if (!cfun->has_nonlocal_label
1326 && has_abnormal_call_or_eh_pred_edge_p (bb))
1274 for (px = 0; px < FIRST_PSEUDO_REGISTER; px++) 1327 for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
1275 if (call_used_regs[px]) 1328 if (call_used_regs[px]
1329 #ifdef REAL_PIC_OFFSET_TABLE_REGNUM
1330 /* We should create a conflict of the PIC pseudo with
1331 the PIC hard reg, as the PIC hard reg can have a wrong
1332 value after the jump described by the abnormal edge.
1333 In that case we cannot allocate the PIC hard reg to
1334 the PIC pseudo, as the PIC pseudo would then also have
1335 a wrong value. This code is not critical, as LRA can
1336 fix it, but it is better to get the allocation right
1337 earlier. */
1338 || (px == REAL_PIC_OFFSET_TABLE_REGNUM
1339 && pic_offset_table_rtx != NULL_RTX
1340 && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
1341 #endif
1342 )
1276 make_hard_regno_born (px); 1343 make_hard_regno_born (px);
1277 } 1344 }
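
For a block entered by an abnormal call or EH edge, the loop above makes every call-clobbered hard register (and, when needed, the PIC hard register) born at the block start; judging from the call-handling hunk earlier, the net effect is that each object live at that point picks up a conflict with those hard registers. A hedged sketch of that effect with plain bitmasks; the 32-register mask, record_abnormal_entry_conflicts and the object array are illustrative only, not IRA's HARD_REG_SET machinery.

/* Illustrative sketch: record conflicts between live objects and
   call-clobbered hard registers at an abnormal block entry.  */
#include <stdio.h>
#include <stdint.h>

#define N_OBJECTS 2

static uint32_t conflict_hard_regs[N_OBJECTS];

/* Make every hard register in CLOBBERED conflict with every
   currently live object, roughly what a loop of
   make_hard_regno_born calls over call_used_regs achieves.  */
static void
record_abnormal_entry_conflicts (uint32_t clobbered,
                                 const int *live, int n_live)
{
  int i;

  for (i = 0; i < n_live; i++)
    conflict_hard_regs[live[i]] |= clobbered;
}

int
main (void)
{
  int live[] = { 0, 1 };            /* both objects live at block entry */
  uint32_t call_clobbered = 0x0f;   /* say, hard regs 0-3 are clobbered  */

  record_abnormal_entry_conflicts (call_clobbered, live, 2);
  printf ("obj0 conflicts: 0x%x\n", (unsigned int) conflict_hard_regs[0]);
  return 0;
}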
1278 1345
1279 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i) 1346 EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1280 make_object_dead (ira_object_id_map[i]); 1347 make_object_dead (ira_object_id_map[i]);
1281 1348
1282 curr_point++; 1349 curr_point++;
1283 1350
1284 } 1351 }
1285 /* Propagate register pressure to upper loop tree nodes: */ 1352 /* Propagate register pressure to upper loop tree nodes. */
1286 if (loop_tree_node != ira_loop_tree_root) 1353 if (loop_tree_node != ira_loop_tree_root)
1287 for (i = 0; i < ira_reg_class_cover_size; i++) 1354 for (i = 0; i < ira_pressure_classes_num; i++)
1288 { 1355 {
1289 enum reg_class cover_class; 1356 enum reg_class pclass;
1290 1357
1291 cover_class = ira_reg_class_cover[i]; 1358 pclass = ira_pressure_classes[i];
1292 if (loop_tree_node->reg_pressure[cover_class] 1359 if (loop_tree_node->reg_pressure[pclass]
1293 > loop_tree_node->parent->reg_pressure[cover_class]) 1360 > loop_tree_node->parent->reg_pressure[pclass])
1294 loop_tree_node->parent->reg_pressure[cover_class] 1361 loop_tree_node->parent->reg_pressure[pclass]
1295 = loop_tree_node->reg_pressure[cover_class]; 1362 = loop_tree_node->reg_pressure[pclass];
1296 } 1363 }
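
The propagation loop above is a per-pressure-class maximum taken from each loop tree node into its parent. Below is a small self-contained sketch of the same operation; struct loop_node and propagate_pressure are hypothetical simplifications of ira_loop_tree_node_t and the loop shown above, not the real types.

/* Simplified sketch of propagating register pressure to the parent
   loop tree node; node and class layout are hypothetical.  */
#include <stdio.h>

#define N_PRESSURE_CLASSES 2

struct loop_node
{
  struct loop_node *parent;
  int reg_pressure[N_PRESSURE_CLASSES];
};

/* Raise PARENT's recorded pressure to at least NODE's pressure,
   class by class, as the loop above does.  */
static void
propagate_pressure (struct loop_node *node)
{
  int i;

  if (node->parent == NULL)
    return;
  for (i = 0; i < N_PRESSURE_CLASSES; i++)
    if (node->reg_pressure[i] > node->parent->reg_pressure[i])
      node->parent->reg_pressure[i] = node->reg_pressure[i];
}

int
main (void)
{
  struct loop_node root = { NULL, { 3, 1 } };
  struct loop_node inner = { &root, { 5, 0 } };

  propagate_pressure (&inner);
  printf ("root pressure: %d %d\n",
          root.reg_pressure[0], root.reg_pressure[1]);
  return 0;
}

Here the inner loop's higher pressure (5) in the first class overrides the root's recorded 3, while the second class keeps the root's larger value.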
1297 } 1364 }
1298 1365
1299 /* Create and set up IRA_START_POINT_RANGES and 1366 /* Create and set up IRA_START_POINT_RANGES and
1300 IRA_FINISH_POINT_RANGES. */ 1367 IRA_FINISH_POINT_RANGES. */
1340 unsigned i; 1407 unsigned i;
1341 int n; 1408 int n;
1342 int *map; 1409 int *map;
1343 ira_object_t obj; 1410 ira_object_t obj;
1344 ira_object_iterator oi; 1411 ira_object_iterator oi;
1345 live_range_t r; 1412 live_range_t r, prev_r, next_r;
1346 sbitmap born_or_dead, born, dead;
1347 sbitmap_iterator sbi; 1413 sbitmap_iterator sbi;
1348 bool born_p, dead_p, prev_born_p, prev_dead_p; 1414 bool born_p, dead_p, prev_born_p, prev_dead_p;
1349 1415
1350 born = sbitmap_alloc (ira_max_point); 1416 auto_sbitmap born (ira_max_point);
1351 dead = sbitmap_alloc (ira_max_point); 1417 auto_sbitmap dead (ira_max_point);
1352 sbitmap_zero (born); 1418 bitmap_clear (born);
1353 sbitmap_zero (dead); 1419 bitmap_clear (dead);
1354 FOR_EACH_OBJECT (obj, oi) 1420 FOR_EACH_OBJECT (obj, oi)
1355 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next) 1421 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1356 { 1422 {
1357 ira_assert (r->start <= r->finish); 1423 ira_assert (r->start <= r->finish);
1358 SET_BIT (born, r->start); 1424 bitmap_set_bit (born, r->start);
1359 SET_BIT (dead, r->finish); 1425 bitmap_set_bit (dead, r->finish);
1360 } 1426 }
1361 1427
1362 born_or_dead = sbitmap_alloc (ira_max_point); 1428 auto_sbitmap born_or_dead (ira_max_point);
1363 sbitmap_a_or_b (born_or_dead, born, dead); 1429 bitmap_ior (born_or_dead, born, dead);
1364 map = (int *) ira_allocate (sizeof (int) * ira_max_point); 1430 map = (int *) ira_allocate (sizeof (int) * ira_max_point);
1365 n = -1; 1431 n = -1;
1366 prev_born_p = prev_dead_p = false; 1432 prev_born_p = prev_dead_p = false;
1367 EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi) 1433 EXECUTE_IF_SET_IN_BITMAP (born_or_dead, 0, i, sbi)
1368 { 1434 {
1369 born_p = TEST_BIT (born, i); 1435 born_p = bitmap_bit_p (born, i);
1370 dead_p = TEST_BIT (dead, i); 1436 dead_p = bitmap_bit_p (dead, i);
1371 if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p) 1437 if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
1372 || (prev_dead_p && ! prev_born_p && dead_p && ! born_p)) 1438 || (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
1373 map[i] = n; 1439 map[i] = n;
1374 else 1440 else
1375 map[i] = ++n; 1441 map[i] = ++n;
1376 prev_born_p = born_p; 1442 prev_born_p = born_p;
1377 prev_dead_p = dead_p; 1443 prev_dead_p = dead_p;
1378 } 1444 }
1379 sbitmap_free (born_or_dead); 1445
1380 sbitmap_free (born);
1381 sbitmap_free (dead);
1382 n++; 1446 n++;
1383 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL) 1447 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1384 fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n", 1448 fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
1385 ira_max_point, n, 100 * n / ira_max_point); 1449 ira_max_point, n, 100 * n / ira_max_point);
1386 ira_max_point = n; 1450 ira_max_point = n;
1387 1451
1388 FOR_EACH_OBJECT (obj, oi) 1452 FOR_EACH_OBJECT (obj, oi)
1389 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next) 1453 for (r = OBJECT_LIVE_RANGES (obj), prev_r = NULL; r != NULL; r = next_r)
1390 { 1454 {
1455 next_r = r->next;
1391 r->start = map[r->start]; 1456 r->start = map[r->start];
1392 r->finish = map[r->finish]; 1457 r->finish = map[r->finish];
1458 if (prev_r == NULL || prev_r->start > r->finish + 1)
1459 {
1460 prev_r = r;
1461 continue;
1462 }
1463 prev_r->start = r->start;
1464 prev_r->next = next_r;
1465 ira_finish_live_range (r);
1393 } 1466 }
1394 1467
1395 ira_free (map); 1468 ira_free (map);
1396 } 1469 }
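
The function above first builds a map that collapses a run of consecutive program points containing only births (or only deaths) into a single point, then rewrites every range's endpoints through that map and, in the new version, splices out ranges that become adjacent to their predecessor. Below is a small self-contained sketch of the renumber-and-merge step; struct range, remap_and_merge and the example map are hypothetical stand-ins for live_range_t and the compression map, with ranges kept in decreasing order of start as in IRA.

/* Sketch of remapping live-range endpoints and merging ranges that
   become adjacent, mirroring the last loop of the function above.  */
#include <stdio.h>
#include <stdlib.h>

struct range
{
  int start, finish;
  struct range *next;
};

/* Apply MAP to every endpoint in the list headed by HEAD (ordered by
   decreasing start) and merge a range into its predecessor when the
   two touch or overlap after remapping.  */
static void
remap_and_merge (struct range *head, const int *map)
{
  struct range *r, *prev_r = NULL, *next_r;

  for (r = head; r != NULL; r = next_r)
    {
      next_r = r->next;
      r->start = map[r->start];
      r->finish = map[r->finish];
      if (prev_r == NULL || prev_r->start > r->finish + 1)
        {
          prev_r = r;
          continue;
        }
      /* prev_r and r touch after remapping: grow prev_r downwards
         and drop r from the list.  */
      prev_r->start = r->start;
      prev_r->next = next_r;
      free (r);
    }
}

int
main (void)
{
  /* Ranges [6..9] and [0..3] with a compression map that collapses
     points 4 and 5 away, making the two ranges adjacent.  */
  int map[10] = { 0, 1, 2, 3, 3, 3, 4, 5, 6, 7 };
  struct range *r2 = malloc (sizeof *r2);
  struct range *r1 = malloc (sizeof *r1);

  r1->start = 6; r1->finish = 9; r1->next = r2;
  r2->start = 0; r2->finish = 3; r2->next = NULL;

  remap_and_merge (r1, map);
  while (r1 != NULL)
    {
      struct range *n = r1->next;
      printf ("[%d..%d]", r1->start, r1->finish);
      free (r1);
      r1 = n;
    }
  printf ("\n");
  return 0;
}

With this map the two ranges come out as the single range [0..7], matching the prev_r merging added in the new version of the loop.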
1397 1470
1402 for (; r != NULL; r = r->next) 1475 for (; r != NULL; r = r->next)
1403 fprintf (f, " [%d..%d]", r->start, r->finish); 1476 fprintf (f, " [%d..%d]", r->start, r->finish);
1404 fprintf (f, "\n"); 1477 fprintf (f, "\n");
1405 } 1478 }
1406 1479
1480 DEBUG_FUNCTION void
1481 debug (live_range &ref)
1482 {
1483 ira_print_live_range_list (stderr, &ref);
1484 }
1485
1486 DEBUG_FUNCTION void
1487 debug (live_range *ptr)
1488 {
1489 if (ptr)
1490 debug (*ptr);
1491 else
1492 fprintf (stderr, "<nil>\n");
1493 }
1494
1407 /* Print live ranges R to stderr. */ 1495 /* Print live ranges R to stderr. */
1408 void 1496 void
1409 ira_debug_live_range_list (live_range_t r) 1497 ira_debug_live_range_list (live_range_t r)
1410 { 1498 {
1411 ira_print_live_range_list (stderr, r); 1499 ira_print_live_range_list (stderr, r);
1422 static void 1510 static void
1423 print_allocno_live_ranges (FILE *f, ira_allocno_t a) 1511 print_allocno_live_ranges (FILE *f, ira_allocno_t a)
1424 { 1512 {
1425 int n = ALLOCNO_NUM_OBJECTS (a); 1513 int n = ALLOCNO_NUM_OBJECTS (a);
1426 int i; 1514 int i;
1515
1427 for (i = 0; i < n; i++) 1516 for (i = 0; i < n; i++)
1428 { 1517 {
1429 fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a)); 1518 fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1430 if (n > 1) 1519 if (n > 1)
1431 fprintf (f, " [%d]", i); 1520 fprintf (f, " [%d]", i);