Mercurial > hg > CbC > CbC_gcc
comparison gcc/alias.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 12 Feb 2010 23:39:51 +0900 |
parents | 855418dad1a3 |
children | b7f97abdc517 |
comparison
equal
deleted
inserted
replaced
52:c156f1bd5cd9 | 55:77e2b8dfacca |
---|---|
44 #include "cgraph.h" | 44 #include "cgraph.h" |
45 #include "varray.h" | 45 #include "varray.h" |
46 #include "tree-pass.h" | 46 #include "tree-pass.h" |
47 #include "ipa-type-escape.h" | 47 #include "ipa-type-escape.h" |
48 #include "df.h" | 48 #include "df.h" |
49 #include "tree-ssa-alias.h" | |
50 #include "pointer-set.h" | |
51 #include "tree-flow.h" | |
49 | 52 |
50 /* The aliasing API provided here solves related but different problems: | 53 /* The aliasing API provided here solves related but different problems: |
51 | 54 |
52 Say there exists (in c) | 55 Say there exists (in c) |
53 | 56 |
126 | 129 |
127 Alias set zero is implicitly a superset of all other alias sets. | 130 Alias set zero is implicitly a superset of all other alias sets. |
128 However, there is no actual entry for alias set zero. It is an | 131 However, there is no actual entry for alias set zero. It is an |
129 error to attempt to explicitly construct a subset of zero. */ | 132 error to attempt to explicitly construct a subset of zero. */ |
130 | 133 |
131 struct alias_set_entry GTY(()) | 134 struct GTY(()) alias_set_entry_d { |
132 { | |
133 /* The alias set number, as stored in MEM_ALIAS_SET. */ | 135 /* The alias set number, as stored in MEM_ALIAS_SET. */ |
134 alias_set_type alias_set; | 136 alias_set_type alias_set; |
135 | 137 |
136 /* Nonzero if would have a child of zero: this effectively makes this | 138 /* Nonzero if would have a child of zero: this effectively makes this |
137 alias set the same as alias set zero. */ | 139 alias set the same as alias set zero. */ |
144 | 146 |
145 continuing our example above, the children here will be all of | 147 continuing our example above, the children here will be all of |
146 `int', `double', `float', and `struct S'. */ | 148 `int', `double', `float', and `struct S'. */ |
147 splay_tree GTY((param1_is (int), param2_is (int))) children; | 149 splay_tree GTY((param1_is (int), param2_is (int))) children; |
148 }; | 150 }; |
149 typedef struct alias_set_entry *alias_set_entry; | 151 typedef struct alias_set_entry_d *alias_set_entry; |
150 | 152 |
151 static int rtx_equal_for_memref_p (const_rtx, const_rtx); | 153 static int rtx_equal_for_memref_p (const_rtx, const_rtx); |
152 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT); | 154 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT); |
153 static void record_set (rtx, const_rtx, void *); | 155 static void record_set (rtx, const_rtx, void *); |
154 static int base_alias_check (rtx, rtx, enum machine_mode, | 156 static int base_alias_check (rtx, rtx, enum machine_mode, |
155 enum machine_mode); | 157 enum machine_mode); |
156 static rtx find_base_value (rtx); | 158 static rtx find_base_value (rtx); |
157 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx); | 159 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx); |
158 static int insert_subset_children (splay_tree_node, void*); | 160 static int insert_subset_children (splay_tree_node, void*); |
159 static tree find_base_decl (tree); | |
160 static alias_set_entry get_alias_set_entry (alias_set_type); | 161 static alias_set_entry get_alias_set_entry (alias_set_type); |
161 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx, | 162 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx, |
162 bool (*) (const_rtx, bool)); | 163 bool (*) (const_rtx, bool)); |
163 static int aliases_everything_p (const_rtx); | 164 static int aliases_everything_p (const_rtx); |
164 static bool nonoverlapping_component_refs_p (const_tree, const_tree); | 165 static bool nonoverlapping_component_refs_p (const_tree, const_tree); |
249 DEF_VEC_ALLOC_P(alias_set_entry,gc); | 250 DEF_VEC_ALLOC_P(alias_set_entry,gc); |
250 | 251 |
251 /* The splay-tree used to store the various alias set entries. */ | 252 /* The splay-tree used to store the various alias set entries. */ |
252 static GTY (()) VEC(alias_set_entry,gc) *alias_sets; | 253 static GTY (()) VEC(alias_set_entry,gc) *alias_sets; |
253 | 254 |
255 /* Build a decomposed reference object for querying the alias-oracle | |
256 from the MEM rtx and store it in *REF. | |
257 Returns false if MEM is not suitable for the alias-oracle. */ | |
258 | |
259 static bool | |
260 ao_ref_from_mem (ao_ref *ref, const_rtx mem) | |
261 { | |
262 tree expr = MEM_EXPR (mem); | |
263 tree base; | |
264 | |
265 if (!expr) | |
266 return false; | |
267 | |
268 /* If MEM_OFFSET or MEM_SIZE are NULL punt. */ | |
269 if (!MEM_OFFSET (mem) | |
270 || !MEM_SIZE (mem)) | |
271 return false; | |
272 | |
273 ao_ref_init (ref, expr); | |
274 | |
275 /* Get the base of the reference and see if we have to reject or | |
276 adjust it. */ | |
277 base = ao_ref_base (ref); | |
278 if (base == NULL_TREE) | |
279 return false; | |
280 | |
281 /* If this is a pointer dereference of a non-SSA_NAME punt. | |
282 ??? We could replace it with a pointer to anything. */ | |
283 if (INDIRECT_REF_P (base) | |
284 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME) | |
285 return false; | |
286 | |
287 /* The tree oracle doesn't like to have these. */ | |
288 if (TREE_CODE (base) == FUNCTION_DECL | |
289 || TREE_CODE (base) == LABEL_DECL) | |
290 return false; | |
291 | |
292 /* If this is a reference based on a partitioned decl replace the | |
293 base with an INDIRECT_REF of the pointer representative we | |
294 created during stack slot partitioning. */ | |
295 if (TREE_CODE (base) == VAR_DECL | |
296 && ! TREE_STATIC (base) | |
297 && cfun->gimple_df->decls_to_pointers != NULL) | |
298 { | |
299 void *namep; | |
300 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base); | |
301 if (namep) | |
302 { | |
303 ref->base_alias_set = get_alias_set (base); | |
304 ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep); | |
305 } | |
306 } | |
307 | |
308 ref->ref_alias_set = MEM_ALIAS_SET (mem); | |
309 | |
310 /* If the base decl is a parameter we can have negative MEM_OFFSET in | |
311 case of promoted subregs on bigendian targets. Trust the MEM_EXPR | |
312 here. */ | |
313 if (INTVAL (MEM_OFFSET (mem)) < 0 | |
314 && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem))) | |
315 * BITS_PER_UNIT) == ref->size) | |
316 return true; | |
317 | |
318 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT; | |
319 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT; | |
320 | |
321 /* The MEM may extend into adjacent fields, so adjust max_size if | |
322 necessary. */ | |
323 if (ref->max_size != -1 | |
324 && ref->size > ref->max_size) | |
325 ref->max_size = ref->size; | |
326 | |
327 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of | |
328 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */ | |
329 if (MEM_EXPR (mem) != get_spill_slot_decl (false) | |
330 && (ref->offset < 0 | |
331 || (DECL_P (ref->base) | |
332 && (!host_integerp (DECL_SIZE (ref->base), 1) | |
333 || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base))) | |
334 < (unsigned HOST_WIDE_INT)(ref->offset + ref->size)))))) | |
335 return false; | |
336 | |
337 return true; | |
338 } | |
339 | |
340 /* Query the alias-oracle on whether the two memory rtx X and MEM may | |
341 alias. If TBAA_P is set also apply TBAA. Returns true if the | |
342 two rtxen may alias, false otherwise. */ | |
343 | |
344 static bool | |
345 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p) | |
346 { | |
347 ao_ref ref1, ref2; | |
348 | |
349 if (!ao_ref_from_mem (&ref1, x) | |
350 || !ao_ref_from_mem (&ref2, mem)) | |
351 return true; | |
352 | |
353 return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p); | |
354 } | |
355 | |
254 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is | 356 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is |
255 such an entry, or NULL otherwise. */ | 357 such an entry, or NULL otherwise. */ |
256 | 358 |
257 static inline alias_set_entry | 359 static inline alias_set_entry |
258 get_alias_set_entry (alias_set_type alias_set) | 360 get_alias_set_entry (alias_set_type alias_set) |
348 { | 450 { |
349 if (MEM_P (*x)) | 451 if (MEM_P (*x)) |
350 { | 452 { |
351 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem))) | 453 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem))) |
352 return 1; | 454 return 1; |
353 | 455 |
354 return -1; | 456 return -1; |
355 } | 457 } |
356 return 0; | 458 return 0; |
357 } | 459 } |
358 | 460 |
359 static int | 461 static int |
363 { | 465 { |
364 /* Visit all MEMs in *PAT and check independence. */ | 466 /* Visit all MEMs in *PAT and check independence. */ |
365 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x)) | 467 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x)) |
366 /* Indicate that dependence was determined and stop traversal. */ | 468 /* Indicate that dependence was determined and stop traversal. */ |
367 return 1; | 469 return 1; |
368 | 470 |
369 return -1; | 471 return -1; |
370 } | 472 } |
371 return 0; | 473 return 0; |
372 } | 474 } |
373 | 475 |
421 t2 for which a pair of subobjects of these respective subtypes | 523 t2 for which a pair of subobjects of these respective subtypes |
422 overlaps on the stack. */ | 524 overlaps on the stack. */ |
423 return alias_sets_must_conflict_p (set1, set2); | 525 return alias_sets_must_conflict_p (set1, set2); |
424 } | 526 } |
425 | 527 |
426 /* T is an expression with pointer type. Find the DECL on which this | |
427 expression is based. (For example, in `a[i]' this would be `a'.) | |
428 If there is no such DECL, or a unique decl cannot be determined, | |
429 NULL_TREE is returned. */ | |
430 | |
431 static tree | |
432 find_base_decl (tree t) | |
433 { | |
434 tree d0, d1; | |
435 | |
436 if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t))) | |
437 return 0; | |
438 | |
439 /* If this is a declaration, return it. If T is based on a restrict | |
440 qualified decl, return that decl. */ | |
441 if (DECL_P (t)) | |
442 { | |
443 if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t)) | |
444 t = DECL_GET_RESTRICT_BASE (t); | |
445 return t; | |
446 } | |
447 | |
448 /* Handle general expressions. It would be nice to deal with | |
449 COMPONENT_REFs here. If we could tell that `a' and `b' were the | |
450 same, then `a->f' and `b->f' are also the same. */ | |
451 switch (TREE_CODE_CLASS (TREE_CODE (t))) | |
452 { | |
453 case tcc_unary: | |
454 return find_base_decl (TREE_OPERAND (t, 0)); | |
455 | |
456 case tcc_binary: | |
457 /* Return 0 if found in neither or both are the same. */ | |
458 d0 = find_base_decl (TREE_OPERAND (t, 0)); | |
459 d1 = find_base_decl (TREE_OPERAND (t, 1)); | |
460 if (d0 == d1) | |
461 return d0; | |
462 else if (d0 == 0) | |
463 return d1; | |
464 else if (d1 == 0) | |
465 return d0; | |
466 else | |
467 return 0; | |
468 | |
469 default: | |
470 return 0; | |
471 } | |
472 } | |
473 | |
474 /* Return true if all nested component references handled by | 528 /* Return true if all nested component references handled by |
475 get_inner_reference in T are such that we should use the alias set | 529 get_inner_reference in T are such that we should use the alias set |
476 provided by the object at the heart of T. | 530 provided by the object at the heart of T. |
477 | 531 |
478 This is true for non-addressable components (which don't have their | 532 This is true for non-addressable components (which don't have their |
516 if (get_alias_set (TREE_TYPE (t)) == 0) | 570 if (get_alias_set (TREE_TYPE (t)) == 0) |
517 return true; | 571 return true; |
518 } | 572 } |
519 } | 573 } |
520 | 574 |
575 /* Return the alias set for the memory pointed to by T, which may be | |
576 either a type or an expression. Return -1 if there is nothing | |
577 special about dereferencing T. */ | |
578 | |
579 static alias_set_type | |
580 get_deref_alias_set_1 (tree t) | |
581 { | |
582 /* If we're not doing any alias analysis, just assume everything | |
583 aliases everything else. */ | |
584 if (!flag_strict_aliasing) | |
585 return 0; | |
586 | |
587 /* All we care about is the type. */ | |
588 if (! TYPE_P (t)) | |
589 t = TREE_TYPE (t); | |
590 | |
591 /* If we have an INDIRECT_REF via a void pointer, we don't | |
592 know anything about what that might alias. Likewise if the | |
593 pointer is marked that way. */ | |
594 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE | |
595 || TYPE_REF_CAN_ALIAS_ALL (t)) | |
596 return 0; | |
597 | |
598 return -1; | |
599 } | |
600 | |
601 /* Return the alias set for the memory pointed to by T, which may be | |
602 either a type or an expression. */ | |
603 | |
604 alias_set_type | |
605 get_deref_alias_set (tree t) | |
606 { | |
607 alias_set_type set = get_deref_alias_set_1 (t); | |
608 | |
609 /* Fall back to the alias-set of the pointed-to type. */ | |
610 if (set == -1) | |
611 { | |
612 if (! TYPE_P (t)) | |
613 t = TREE_TYPE (t); | |
614 set = get_alias_set (TREE_TYPE (t)); | |
615 } | |
616 | |
617 return set; | |
618 } | |
619 | |
521 /* Return the alias set for T, which may be either a type or an | 620 /* Return the alias set for T, which may be either a type or an |
522 expression. Call language-specific routine for help, if needed. */ | 621 expression. Call language-specific routine for help, if needed. */ |
523 | 622 |
524 alias_set_type | 623 alias_set_type |
525 get_alias_set (tree t) | 624 get_alias_set (tree t) |
539 to figure out what to do. At each juncture, we see if this is a tree | 638 to figure out what to do. At each juncture, we see if this is a tree |
540 that the language may need to handle specially. First handle things that | 639 that the language may need to handle specially. First handle things that |
541 aren't types. */ | 640 aren't types. */ |
542 if (! TYPE_P (t)) | 641 if (! TYPE_P (t)) |
543 { | 642 { |
544 tree inner = t; | 643 tree inner; |
545 | 644 |
546 /* Remove any nops, then give the language a chance to do | 645 /* Remove any nops, then give the language a chance to do |
547 something with this tree before we look at it. */ | 646 something with this tree before we look at it. */ |
548 STRIP_NOPS (t); | 647 STRIP_NOPS (t); |
549 set = lang_hooks.get_alias_set (t); | 648 set = lang_hooks.get_alias_set (t); |
550 if (set != -1) | 649 if (set != -1) |
551 return set; | 650 return set; |
552 | 651 |
652 /* Retrieve the original memory reference if needed. */ | |
653 if (TREE_CODE (t) == TARGET_MEM_REF) | |
654 t = TMR_ORIGINAL (t); | |
655 | |
553 /* First see if the actual object referenced is an INDIRECT_REF from a | 656 /* First see if the actual object referenced is an INDIRECT_REF from a |
554 restrict-qualified pointer or a "void *". */ | 657 restrict-qualified pointer or a "void *". */ |
658 inner = t; | |
555 while (handled_component_p (inner)) | 659 while (handled_component_p (inner)) |
556 { | 660 { |
557 inner = TREE_OPERAND (inner, 0); | 661 inner = TREE_OPERAND (inner, 0); |
558 STRIP_NOPS (inner); | 662 STRIP_NOPS (inner); |
559 } | 663 } |
560 | 664 |
561 /* Check for accesses through restrict-qualified pointers. */ | |
562 if (INDIRECT_REF_P (inner)) | 665 if (INDIRECT_REF_P (inner)) |
563 { | 666 { |
564 tree decl; | 667 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0)); |
565 | 668 if (set != -1) |
566 if (TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME) | 669 return set; |
567 decl = SSA_NAME_VAR (TREE_OPERAND (inner, 0)); | |
568 else | |
569 decl = find_base_decl (TREE_OPERAND (inner, 0)); | |
570 | |
571 if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl)) | |
572 { | |
573 /* If we haven't computed the actual alias set, do it now. */ | |
574 if (DECL_POINTER_ALIAS_SET (decl) == -2) | |
575 { | |
576 tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl)); | |
577 | |
578 /* No two restricted pointers can point at the same thing. | |
579 However, a restricted pointer can point at the same thing | |
580 as an unrestricted pointer, if that unrestricted pointer | |
581 is based on the restricted pointer. So, we make the | |
582 alias set for the restricted pointer a subset of the | |
583 alias set for the type pointed to by the type of the | |
584 decl. */ | |
585 alias_set_type pointed_to_alias_set | |
586 = get_alias_set (pointed_to_type); | |
587 | |
588 if (pointed_to_alias_set == 0) | |
589 /* It's not legal to make a subset of alias set zero. */ | |
590 DECL_POINTER_ALIAS_SET (decl) = 0; | |
591 else if (AGGREGATE_TYPE_P (pointed_to_type)) | |
592 /* For an aggregate, we must treat the restricted | |
593 pointer the same as an ordinary pointer. If we | |
594 were to make the type pointed to by the | |
595 restricted pointer a subset of the pointed-to | |
596 type, then we would believe that other subsets | |
597 of the pointed-to type (such as fields of that | |
598 type) do not conflict with the type pointed to | |
599 by the restricted pointer. */ | |
600 DECL_POINTER_ALIAS_SET (decl) | |
601 = pointed_to_alias_set; | |
602 else | |
603 { | |
604 DECL_POINTER_ALIAS_SET (decl) = new_alias_set (); | |
605 record_alias_subset (pointed_to_alias_set, | |
606 DECL_POINTER_ALIAS_SET (decl)); | |
607 } | |
608 } | |
609 | |
610 /* We use the alias set indicated in the declaration. */ | |
611 return DECL_POINTER_ALIAS_SET (decl); | |
612 } | |
613 | |
614 /* If we have an INDIRECT_REF via a void pointer, we don't | |
615 know anything about what that might alias. Likewise if the | |
616 pointer is marked that way. */ | |
617 else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE | |
618 || (TYPE_REF_CAN_ALIAS_ALL | |
619 (TREE_TYPE (TREE_OPERAND (inner, 0))))) | |
620 return 0; | |
621 } | 670 } |
622 | 671 |
623 /* Otherwise, pick up the outermost object that we could have a pointer | 672 /* Otherwise, pick up the outermost object that we could have a pointer |
624 to, processing conversions as above. */ | 673 to, processing conversions as above. */ |
625 while (component_uses_parent_alias_set (t)) | 674 while (component_uses_parent_alias_set (t)) |
638 /* Now all we care about is the type. */ | 687 /* Now all we care about is the type. */ |
639 t = TREE_TYPE (t); | 688 t = TREE_TYPE (t); |
640 } | 689 } |
641 | 690 |
642 /* Variant qualifiers don't affect the alias set, so get the main | 691 /* Variant qualifiers don't affect the alias set, so get the main |
643 variant. Always use the canonical type as well. | 692 variant. */ |
644 If this is a type with a known alias set, return it. */ | |
645 t = TYPE_MAIN_VARIANT (t); | 693 t = TYPE_MAIN_VARIANT (t); |
646 if (TYPE_CANONICAL (t)) | 694 |
647 t = TYPE_CANONICAL (t); | 695 /* Always use the canonical type as well. If this is a type that |
696 requires structural comparisons to identify compatible types | |
697 use alias set zero. */ | |
698 if (TYPE_STRUCTURAL_EQUALITY_P (t)) | |
699 { | |
700 /* Allow the language to specify another alias set for this | |
701 type. */ | |
702 set = lang_hooks.get_alias_set (t); | |
703 if (set != -1) | |
704 return set; | |
705 return 0; | |
706 } | |
707 t = TYPE_CANONICAL (t); | |
708 /* Canonical types shouldn't form a tree nor should the canonical | |
709 type require structural equality checks. */ | |
710 gcc_assert (!TYPE_STRUCTURAL_EQUALITY_P (t) && TYPE_CANONICAL (t) == t); | |
711 | |
712 /* If this is a type with a known alias set, return it. */ | |
648 if (TYPE_ALIAS_SET_KNOWN_P (t)) | 713 if (TYPE_ALIAS_SET_KNOWN_P (t)) |
649 return TYPE_ALIAS_SET (t); | 714 return TYPE_ALIAS_SET (t); |
650 | 715 |
651 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */ | 716 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */ |
652 if (!COMPLETE_TYPE_P (t)) | 717 if (!COMPLETE_TYPE_P (t)) |
750 superset_entry = get_alias_set_entry (superset); | 815 superset_entry = get_alias_set_entry (superset); |
751 if (superset_entry == 0) | 816 if (superset_entry == 0) |
752 { | 817 { |
753 /* Create an entry for the SUPERSET, so that we have a place to | 818 /* Create an entry for the SUPERSET, so that we have a place to |
754 attach the SUBSET. */ | 819 attach the SUBSET. */ |
755 superset_entry = GGC_NEW (struct alias_set_entry); | 820 superset_entry = GGC_NEW (struct alias_set_entry_d); |
756 superset_entry->alias_set = superset; | 821 superset_entry->alias_set = superset; |
757 superset_entry->children | 822 superset_entry->children |
758 = splay_tree_new_ggc (splay_tree_compare_ints); | 823 = splay_tree_new_ggc (splay_tree_compare_ints); |
759 superset_entry->has_zero_child = 0; | 824 superset_entry->has_zero_child = 0; |
760 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry); | 825 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry); |
977 return src_1; | 1042 return src_1; |
978 | 1043 |
979 /* Guess which operand is the base address: | 1044 /* Guess which operand is the base address: |
980 If either operand is a symbol, then it is the base. If | 1045 If either operand is a symbol, then it is the base. If |
981 either operand is a CONST_INT, then the other is the base. */ | 1046 either operand is a CONST_INT, then the other is the base. */ |
982 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0)) | 1047 if (CONST_INT_P (src_1) || CONSTANT_P (src_0)) |
983 return find_base_value (src_0); | 1048 return find_base_value (src_0); |
984 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1)) | 1049 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1)) |
985 return find_base_value (src_1); | 1050 return find_base_value (src_1); |
986 | 1051 |
987 return 0; | 1052 return 0; |
988 } | 1053 } |
989 | 1054 |
993 return find_base_value (XEXP (src, 1)); | 1058 return find_base_value (XEXP (src, 1)); |
994 | 1059 |
995 case AND: | 1060 case AND: |
996 /* If the second operand is constant set the base | 1061 /* If the second operand is constant set the base |
997 address to the first operand. */ | 1062 address to the first operand. */ |
998 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0) | 1063 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0) |
999 return find_base_value (XEXP (src, 0)); | 1064 return find_base_value (XEXP (src, 0)); |
1000 return 0; | 1065 return 0; |
1001 | 1066 |
1002 case TRUNCATE: | 1067 case TRUNCATE: |
1068 /* As we do not know which address space the pointer is referring to, we can | |
1069 handle this only if the target does not support different pointer or | |
1070 address modes depending on the address space. */ | |
1071 if (!target_default_pointer_address_modes_p ()) | |
1072 break; | |
1003 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode)) | 1073 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode)) |
1004 break; | 1074 break; |
1005 /* Fall through. */ | 1075 /* Fall through. */ |
1006 case HIGH: | 1076 case HIGH: |
1007 case PRE_INC: | 1077 case PRE_INC: |
1012 case POST_MODIFY: | 1082 case POST_MODIFY: |
1013 return find_base_value (XEXP (src, 0)); | 1083 return find_base_value (XEXP (src, 0)); |
1014 | 1084 |
1015 case ZERO_EXTEND: | 1085 case ZERO_EXTEND: |
1016 case SIGN_EXTEND: /* used for NT/Alpha pointers */ | 1086 case SIGN_EXTEND: /* used for NT/Alpha pointers */ |
1087 /* As we do not know which address space the pointer is referring to, we can | |
1088 handle this only if the target does not support different pointer or | |
1089 address modes depending on the address space. */ | |
1090 if (!target_default_pointer_address_modes_p ()) | |
1091 break; | |
1092 | |
1017 { | 1093 { |
1018 rtx temp = find_base_value (XEXP (src, 0)); | 1094 rtx temp = find_base_value (XEXP (src, 0)); |
1019 | 1095 |
1020 if (temp != 0 && CONSTANT_P (temp)) | 1096 if (temp != 0 && CONSTANT_P (temp)) |
1021 temp = convert_memory_address (Pmode, temp); | 1097 temp = convert_memory_address (Pmode, temp); |
1135 if (! other || find_base_value (other)) | 1211 if (! other || find_base_value (other)) |
1136 new_reg_base_value[regno] = 0; | 1212 new_reg_base_value[regno] = 0; |
1137 break; | 1213 break; |
1138 } | 1214 } |
1139 case AND: | 1215 case AND: |
1140 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT) | 1216 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1))) |
1141 new_reg_base_value[regno] = 0; | 1217 new_reg_base_value[regno] = 0; |
1142 break; | 1218 break; |
1143 default: | 1219 default: |
1144 new_reg_base_value[regno] = 0; | 1220 new_reg_base_value[regno] = 0; |
1145 break; | 1221 break; |
1228 rtx x0 = canon_rtx (XEXP (x, 0)); | 1304 rtx x0 = canon_rtx (XEXP (x, 0)); |
1229 rtx x1 = canon_rtx (XEXP (x, 1)); | 1305 rtx x1 = canon_rtx (XEXP (x, 1)); |
1230 | 1306 |
1231 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1)) | 1307 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1)) |
1232 { | 1308 { |
1233 if (GET_CODE (x0) == CONST_INT) | 1309 if (CONST_INT_P (x0)) |
1234 return plus_constant (x1, INTVAL (x0)); | 1310 return plus_constant (x1, INTVAL (x0)); |
1235 else if (GET_CODE (x1) == CONST_INT) | 1311 else if (CONST_INT_P (x1)) |
1236 return plus_constant (x0, INTVAL (x1)); | 1312 return plus_constant (x0, INTVAL (x1)); |
1237 return gen_rtx_PLUS (GET_MODE (x), x0, x1); | 1313 return gen_rtx_PLUS (GET_MODE (x), x0, x1); |
1238 } | 1314 } |
1239 } | 1315 } |
1240 | 1316 |
1404 { | 1480 { |
1405 case REG: | 1481 case REG: |
1406 return REG_BASE_VALUE (x); | 1482 return REG_BASE_VALUE (x); |
1407 | 1483 |
1408 case TRUNCATE: | 1484 case TRUNCATE: |
1485 /* As we do not know which address space the pointer is referring to, we can | |
1486 handle this only if the target does not support different pointer or | |
1487 address modes depending on the address space. */ | |
1488 if (!target_default_pointer_address_modes_p ()) | |
1489 return 0; | |
1409 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode)) | 1490 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode)) |
1410 return 0; | 1491 return 0; |
1411 /* Fall through. */ | 1492 /* Fall through. */ |
1412 case HIGH: | 1493 case HIGH: |
1413 case PRE_INC: | 1494 case PRE_INC: |
1418 case POST_MODIFY: | 1499 case POST_MODIFY: |
1419 return find_base_term (XEXP (x, 0)); | 1500 return find_base_term (XEXP (x, 0)); |
1420 | 1501 |
1421 case ZERO_EXTEND: | 1502 case ZERO_EXTEND: |
1422 case SIGN_EXTEND: /* Used for Alpha/NT pointers */ | 1503 case SIGN_EXTEND: /* Used for Alpha/NT pointers */ |
1504 /* As we do not know which address space the pointer is referring to, we can | |
1505 handle this only if the target does not support different pointer or | |
1506 address modes depending on the address space. */ | |
1507 if (!target_default_pointer_address_modes_p ()) | |
1508 return 0; | |
1509 | |
1423 { | 1510 { |
1424 rtx temp = find_base_term (XEXP (x, 0)); | 1511 rtx temp = find_base_term (XEXP (x, 0)); |
1425 | 1512 |
1426 if (temp != 0 && CONSTANT_P (temp)) | 1513 if (temp != 0 && CONSTANT_P (temp)) |
1427 temp = convert_memory_address (Pmode, temp); | 1514 temp = convert_memory_address (Pmode, temp); |
1470 return find_base_term (tmp2); | 1557 return find_base_term (tmp2); |
1471 | 1558 |
1472 /* If either operand is known to be a pointer, then use it | 1559 /* If either operand is known to be a pointer, then use it |
1473 to determine the base term. */ | 1560 to determine the base term. */ |
1474 if (REG_P (tmp1) && REG_POINTER (tmp1)) | 1561 if (REG_P (tmp1) && REG_POINTER (tmp1)) |
1475 return find_base_term (tmp1); | 1562 { |
1563 rtx base = find_base_term (tmp1); | |
1564 if (base) | |
1565 return base; | |
1566 } | |
1476 | 1567 |
1477 if (REG_P (tmp2) && REG_POINTER (tmp2)) | 1568 if (REG_P (tmp2) && REG_POINTER (tmp2)) |
1478 return find_base_term (tmp2); | 1569 { |
1570 rtx base = find_base_term (tmp2); | |
1571 if (base) | |
1572 return base; | |
1573 } | |
1479 | 1574 |
1480 /* Neither operand was known to be a pointer. Go ahead and find the | 1575 /* Neither operand was known to be a pointer. Go ahead and find the |
1481 base term for both operands. */ | 1576 base term for both operands. */ |
1482 tmp1 = find_base_term (tmp1); | 1577 tmp1 = find_base_term (tmp1); |
1483 tmp2 = find_base_term (tmp2); | 1578 tmp2 = find_base_term (tmp2); |
1504 nothing from the base alias check. */ | 1599 nothing from the base alias check. */ |
1505 return 0; | 1600 return 0; |
1506 } | 1601 } |
1507 | 1602 |
1508 case AND: | 1603 case AND: |
1509 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0) | 1604 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0) |
1510 return find_base_term (XEXP (x, 0)); | 1605 return find_base_term (XEXP (x, 0)); |
1511 return 0; | 1606 return 0; |
1512 | 1607 |
1513 case SYMBOL_REF: | 1608 case SYMBOL_REF: |
1514 case LABEL_REF: | 1609 case LABEL_REF: |
1567 via AND address can alias all surrounding object types except those | 1662 via AND address can alias all surrounding object types except those |
1568 with alignment 8 or higher. */ | 1663 with alignment 8 or higher. */ |
1569 if (GET_CODE (x) == AND && GET_CODE (y) == AND) | 1664 if (GET_CODE (x) == AND && GET_CODE (y) == AND) |
1570 return 1; | 1665 return 1; |
1571 if (GET_CODE (x) == AND | 1666 if (GET_CODE (x) == AND |
1572 && (GET_CODE (XEXP (x, 1)) != CONST_INT | 1667 && (!CONST_INT_P (XEXP (x, 1)) |
1573 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1)))) | 1668 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1)))) |
1574 return 1; | 1669 return 1; |
1575 if (GET_CODE (y) == AND | 1670 if (GET_CODE (y) == AND |
1576 && (GET_CODE (XEXP (y, 1)) != CONST_INT | 1671 && (!CONST_INT_P (XEXP (y, 1)) |
1577 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1)))) | 1672 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1)))) |
1578 return 1; | 1673 return 1; |
1579 | 1674 |
1580 /* Differing symbols not accessed via AND never alias. */ | 1675 /* Differing symbols not accessed via AND never alias. */ |
1581 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS) | 1676 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS) |
1732 | 1827 |
1733 if (rtx_equal_for_memref_p (x1, y1)) | 1828 if (rtx_equal_for_memref_p (x1, y1)) |
1734 return memrefs_conflict_p (xsize, x0, ysize, y0, c); | 1829 return memrefs_conflict_p (xsize, x0, ysize, y0, c); |
1735 if (rtx_equal_for_memref_p (x0, y0)) | 1830 if (rtx_equal_for_memref_p (x0, y0)) |
1736 return memrefs_conflict_p (xsize, x1, ysize, y1, c); | 1831 return memrefs_conflict_p (xsize, x1, ysize, y1, c); |
1737 if (GET_CODE (x1) == CONST_INT) | 1832 if (CONST_INT_P (x1)) |
1738 { | 1833 { |
1739 if (GET_CODE (y1) == CONST_INT) | 1834 if (CONST_INT_P (y1)) |
1740 return memrefs_conflict_p (xsize, x0, ysize, y0, | 1835 return memrefs_conflict_p (xsize, x0, ysize, y0, |
1741 c - INTVAL (x1) + INTVAL (y1)); | 1836 c - INTVAL (x1) + INTVAL (y1)); |
1742 else | 1837 else |
1743 return memrefs_conflict_p (xsize, x0, ysize, y, | 1838 return memrefs_conflict_p (xsize, x0, ysize, y, |
1744 c - INTVAL (x1)); | 1839 c - INTVAL (x1)); |
1745 } | 1840 } |
1746 else if (GET_CODE (y1) == CONST_INT) | 1841 else if (CONST_INT_P (y1)) |
1747 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | 1842 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); |
1748 | 1843 |
1749 return 1; | 1844 return 1; |
1750 } | 1845 } |
1751 else if (GET_CODE (x1) == CONST_INT) | 1846 else if (CONST_INT_P (x1)) |
1752 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); | 1847 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); |
1753 } | 1848 } |
1754 else if (GET_CODE (y) == PLUS) | 1849 else if (GET_CODE (y) == PLUS) |
1755 { | 1850 { |
1756 /* The fact that Y is canonicalized means that this | 1851 /* The fact that Y is canonicalized means that this |
1757 PLUS rtx is canonicalized. */ | 1852 PLUS rtx is canonicalized. */ |
1758 rtx y0 = XEXP (y, 0); | 1853 rtx y0 = XEXP (y, 0); |
1759 rtx y1 = XEXP (y, 1); | 1854 rtx y1 = XEXP (y, 1); |
1760 | 1855 |
1761 if (GET_CODE (y1) == CONST_INT) | 1856 if (CONST_INT_P (y1)) |
1762 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | 1857 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); |
1763 else | 1858 else |
1764 return 1; | 1859 return 1; |
1765 } | 1860 } |
1766 | 1861 |
1782 if (rtx_equal_for_memref_p (x0, y0)) | 1877 if (rtx_equal_for_memref_p (x0, y0)) |
1783 return (xsize == 0 || ysize == 0 | 1878 return (xsize == 0 || ysize == 0 |
1784 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); | 1879 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); |
1785 | 1880 |
1786 /* Can't properly adjust our sizes. */ | 1881 /* Can't properly adjust our sizes. */ |
1787 if (GET_CODE (x1) != CONST_INT) | 1882 if (!CONST_INT_P (x1)) |
1788 return 1; | 1883 return 1; |
1789 xsize /= INTVAL (x1); | 1884 xsize /= INTVAL (x1); |
1790 ysize /= INTVAL (x1); | 1885 ysize /= INTVAL (x1); |
1791 c /= INTVAL (x1); | 1886 c /= INTVAL (x1); |
1792 return memrefs_conflict_p (xsize, x0, ysize, y0, c); | 1887 return memrefs_conflict_p (xsize, x0, ysize, y0, c); |
1798 | 1893 |
1799 /* Treat an access through an AND (e.g. a subword access on an Alpha) | 1894 /* Treat an access through an AND (e.g. a subword access on an Alpha) |
1800 as an access with indeterminate size. Assume that references | 1895 as an access with indeterminate size. Assume that references |
1801 besides AND are aligned, so if the size of the other reference is | 1896 besides AND are aligned, so if the size of the other reference is |
1802 at least as large as the alignment, assume no other overlap. */ | 1897 at least as large as the alignment, assume no other overlap. */ |
1803 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT) | 1898 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))) |
1804 { | 1899 { |
1805 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1))) | 1900 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1))) |
1806 xsize = -1; | 1901 xsize = -1; |
1807 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c); | 1902 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c); |
1808 } | 1903 } |
1809 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT) | 1904 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1))) |
1810 { | 1905 { |
1811 /* ??? If we are indexing far enough into the array/structure, we | 1906 /* ??? If we are indexing far enough into the array/structure, we |
1812 may yet be able to determine that we can not overlap. But we | 1907 may yet be able to determine that we can not overlap. But we |
1813 also need to that we are far enough from the end not to overlap | 1908 also need to that we are far enough from the end not to overlap |
1814 a following reference, so we do nothing with that for now. */ | 1909 a following reference, so we do nothing with that for now. */ |
1817 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c); | 1912 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c); |
1818 } | 1913 } |
1819 | 1914 |
1820 if (CONSTANT_P (x)) | 1915 if (CONSTANT_P (x)) |
1821 { | 1916 { |
1822 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT) | 1917 if (CONST_INT_P (x) && CONST_INT_P (y)) |
1823 { | 1918 { |
1824 c += (INTVAL (y) - INTVAL (x)); | 1919 c += (INTVAL (y) - INTVAL (x)); |
1825 return (xsize <= 0 || ysize <= 0 | 1920 return (xsize <= 0 || ysize <= 0 |
1826 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); | 1921 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); |
1827 } | 1922 } |
2108 } | 2203 } |
2109 | 2204 |
2110 if (! DECL_P (exprx) || ! DECL_P (expry)) | 2205 if (! DECL_P (exprx) || ! DECL_P (expry)) |
2111 return 0; | 2206 return 0; |
2112 | 2207 |
2208 /* With invalid code we can end up storing into the constant pool. | |
2209 Bail out to avoid ICEing when creating RTL for this. | |
2210 See gfortran.dg/lto/20091028-2_0.f90. */ | |
2211 if (TREE_CODE (exprx) == CONST_DECL | |
2212 || TREE_CODE (expry) == CONST_DECL) | |
2213 return 1; | |
2214 | |
2113 rtlx = DECL_RTL (exprx); | 2215 rtlx = DECL_RTL (exprx); |
2114 rtly = DECL_RTL (expry); | 2216 rtly = DECL_RTL (expry); |
2115 | 2217 |
2116 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they | 2218 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they |
2117 can't overlap unless they are the same because we never reuse that part | 2219 can't overlap unless they are the same because we never reuse that part |
2118 of the stack frame used for locals for spilled pseudos. */ | 2220 of the stack frame used for locals for spilled pseudos. */ |
2119 if ((!MEM_P (rtlx) || !MEM_P (rtly)) | 2221 if ((!MEM_P (rtlx) || !MEM_P (rtly)) |
2120 && ! rtx_equal_p (rtlx, rtly)) | 2222 && ! rtx_equal_p (rtlx, rtly)) |
2121 return 1; | 2223 return 1; |
2122 | 2224 |
2225 /* If we have MEMs refering to different address spaces (which can | |
2226 potentially overlap), we cannot easily tell from the addresses | |
2227 whether the references overlap. */ | |
2228 if (MEM_P (rtlx) && MEM_P (rtly) | |
2229 && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly)) | |
2230 return 0; | |
2231 | |
2123 /* Get the base and offsets of both decls. If either is a register, we | 2232 /* Get the base and offsets of both decls. If either is a register, we |
2124 know both are and are the same, so use that as the base. The only | 2233 know both are and are the same, so use that as the base. The only |
2125 we can avoid overlap is if we can deduce that they are nonoverlapping | 2234 we can avoid overlap is if we can deduce that they are nonoverlapping |
2126 pieces of that decl, which is very rare. */ | 2235 pieces of that decl, which is very rare. */ |
2127 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx; | 2236 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx; |
2128 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT) | 2237 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1))) |
2129 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0); | 2238 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0); |
2130 | 2239 |
2131 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly; | 2240 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly; |
2132 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT) | 2241 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1))) |
2133 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0); | 2242 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0); |
2134 | 2243 |
2135 /* If the bases are different, we know they do not overlap if both | 2244 /* If the bases are different, we know they do not overlap if both |
2136 are constants or if one is a constant and the other a pointer into the | 2245 are constants or if one is a constant and the other a pointer into the |
2137 stack frame. Otherwise a different base means we can't tell if they | 2246 stack frame. Otherwise a different base means we can't tell if they |
2209 return 0; | 2318 return 0; |
2210 | 2319 |
2211 if (nonoverlapping_memrefs_p (mem, x)) | 2320 if (nonoverlapping_memrefs_p (mem, x)) |
2212 return 0; | 2321 return 0; |
2213 | 2322 |
2323 /* If we have MEMs refering to different address spaces (which can | |
2324 potentially overlap), we cannot easily tell from the addresses | |
2325 whether the references overlap. */ | |
2326 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x)) | |
2327 return 1; | |
2328 | |
2214 if (mem_mode == VOIDmode) | 2329 if (mem_mode == VOIDmode) |
2215 mem_mode = GET_MODE (mem); | 2330 mem_mode = GET_MODE (mem); |
2216 | 2331 |
2217 x_addr = get_addr (XEXP (x, 0)); | 2332 x_addr = get_addr (XEXP (x, 0)); |
2218 mem_addr = get_addr (XEXP (mem, 0)); | 2333 mem_addr = get_addr (XEXP (mem, 0)); |
2244 /* In true_dependence we also allow BLKmode to alias anything. Why | 2359 /* In true_dependence we also allow BLKmode to alias anything. Why |
2245 don't we do this in anti_dependence and output_dependence? */ | 2360 don't we do this in anti_dependence and output_dependence? */ |
2246 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) | 2361 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) |
2247 return 1; | 2362 return 1; |
2248 | 2363 |
2249 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, | 2364 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies)) |
2250 varies); | 2365 return 0; |
2366 | |
2367 return rtx_refs_may_alias_p (x, mem, true); | |
2251 } | 2368 } |
2252 | 2369 |
2253 /* Canonical true dependence: X is read after store in MEM takes place. | 2370 /* Canonical true dependence: X is read after store in MEM takes place. |
2254 Variant of true_dependence which assumes MEM has already been | 2371 Variant of true_dependence which assumes MEM has already been |
2255 canonicalized (hence we no longer do that here). | 2372 canonicalized (hence we no longer do that here). |
2284 return 0; | 2401 return 0; |
2285 | 2402 |
2286 if (nonoverlapping_memrefs_p (x, mem)) | 2403 if (nonoverlapping_memrefs_p (x, mem)) |
2287 return 0; | 2404 return 0; |
2288 | 2405 |
2406 /* If we have MEMs refering to different address spaces (which can | |
2407 potentially overlap), we cannot easily tell from the addresses | |
2408 whether the references overlap. */ | |
2409 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x)) | |
2410 return 1; | |
2411 | |
2289 if (! x_addr) | 2412 if (! x_addr) |
2290 x_addr = get_addr (XEXP (x, 0)); | 2413 x_addr = get_addr (XEXP (x, 0)); |
2291 | 2414 |
2292 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode)) | 2415 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode)) |
2293 return 0; | 2416 return 0; |
2308 /* In true_dependence we also allow BLKmode to alias anything. Why | 2431 /* In true_dependence we also allow BLKmode to alias anything. Why |
2309 don't we do this in anti_dependence and output_dependence? */ | 2432 don't we do this in anti_dependence and output_dependence? */ |
2310 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) | 2433 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) |
2311 return 1; | 2434 return 1; |
2312 | 2435 |
2313 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, | 2436 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies)) |
2314 varies); | 2437 return 0; |
2438 | |
2439 return rtx_refs_may_alias_p (x, mem, true); | |
2315 } | 2440 } |
2316 | 2441 |
2317 /* Returns nonzero if a write to X might alias a previous read from | 2442 /* Returns nonzero if a write to X might alias a previous read from |
2318 (or, if WRITEP is nonzero, a write to) MEM. */ | 2443 (or, if WRITEP is nonzero, a write to) MEM. */ |
2319 | 2444 |
2335 return 1; | 2460 return 1; |
2336 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER | 2461 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER |
2337 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER) | 2462 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER) |
2338 return 1; | 2463 return 1; |
2339 | 2464 |
2340 if (DIFFERENT_ALIAS_SETS_P (x, mem)) | |
2341 return 0; | |
2342 | |
2343 /* A read from read-only memory can't conflict with read-write memory. */ | 2465 /* A read from read-only memory can't conflict with read-write memory. */ |
2344 if (!writep && MEM_READONLY_P (mem)) | 2466 if (!writep && MEM_READONLY_P (mem)) |
2345 return 0; | 2467 return 0; |
2346 | 2468 |
2347 if (nonoverlapping_memrefs_p (x, mem)) | 2469 if (nonoverlapping_memrefs_p (x, mem)) |
2348 return 0; | 2470 return 0; |
2471 | |
2472 /* If we have MEMs refering to different address spaces (which can | |
2473 potentially overlap), we cannot easily tell from the addresses | |
2474 whether the references overlap. */ | |
2475 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x)) | |
2476 return 1; | |
2349 | 2477 |
2350 x_addr = get_addr (XEXP (x, 0)); | 2478 x_addr = get_addr (XEXP (x, 0)); |
2351 mem_addr = get_addr (XEXP (mem, 0)); | 2479 mem_addr = get_addr (XEXP (mem, 0)); |
2352 | 2480 |
2353 if (! writep) | 2481 if (! writep) |
2372 | 2500 |
2373 fixed_scalar | 2501 fixed_scalar |
2374 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, | 2502 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, |
2375 rtx_addr_varies_p); | 2503 rtx_addr_varies_p); |
2376 | 2504 |
2377 return (!(fixed_scalar == mem && !aliases_everything_p (x)) | 2505 if ((fixed_scalar == mem && !aliases_everything_p (x)) |
2378 && !(fixed_scalar == x && !aliases_everything_p (mem))); | 2506 || (fixed_scalar == x && !aliases_everything_p (mem))) |
2507 return 0; | |
2508 | |
2509 return rtx_refs_may_alias_p (x, mem, false); | |
2379 } | 2510 } |
2380 | 2511 |
2381 /* Anti dependence: X is written after read in MEM takes place. */ | 2512 /* Anti dependence: X is written after read in MEM takes place. */ |
2382 | 2513 |
2383 int | 2514 int |
2584 } | 2715 } |
2585 else if (DF_REG_DEF_COUNT (regno) == 1 | 2716 else if (DF_REG_DEF_COUNT (regno) == 1 |
2586 && GET_CODE (src) == PLUS | 2717 && GET_CODE (src) == PLUS |
2587 && REG_P (XEXP (src, 0)) | 2718 && REG_P (XEXP (src, 0)) |
2588 && (t = get_reg_known_value (REGNO (XEXP (src, 0)))) | 2719 && (t = get_reg_known_value (REGNO (XEXP (src, 0)))) |
2589 && GET_CODE (XEXP (src, 1)) == CONST_INT) | 2720 && CONST_INT_P (XEXP (src, 1))) |
2590 { | 2721 { |
2591 t = plus_constant (t, INTVAL (XEXP (src, 1))); | 2722 t = plus_constant (t, INTVAL (XEXP (src, 1))); |
2592 set_reg_known_value (regno, t); | 2723 set_reg_known_value (regno, t); |
2593 set_reg_known_equiv_p (regno, 0); | 2724 set_reg_known_equiv_p (regno, 0); |
2594 } | 2725 } |