comparison gcc/tree-ssa-sccvn.c @ 55:77e2b8dfacca gcc-4.4.5

Update gcc/tree-ssa-sccvn.c from GCC 4.4.3 to 4.5.0.
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents 58ad6c70ea60
children b7f97abdc517
52:c156f1bd5cd9 (old) 55:77e2b8dfacca (new)
322 322
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant); 323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
324 } 324 }
325 325
326 /* Hash table hash function for vn_constant_t. */ 326 /* Hash table hash function for vn_constant_t. */
327 327
328 static hashval_t 328 static hashval_t
329 vn_constant_hash (const void *p1) 329 vn_constant_hash (const void *p1)
330 { 330 {
331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1; 331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332 return vc1->hashcode; 332 return vc1->hashcode;
356 unsigned int 356 unsigned int
357 get_or_alloc_constant_value_id (tree constant) 357 get_or_alloc_constant_value_id (tree constant)
358 { 358 {
359 void **slot; 359 void **slot;
360 vn_constant_t vc = XNEW (struct vn_constant_s); 360 vn_constant_t vc = XNEW (struct vn_constant_s);
361 361
362 vc->hashcode = vn_hash_constant_with_type (constant); 362 vc->hashcode = vn_hash_constant_with_type (constant);
363 vc->constant = constant; 363 vc->constant = constant;
364 slot = htab_find_slot_with_hash (constant_to_value_id, vc, 364 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
365 vc->hashcode, INSERT); 365 vc->hashcode, INSERT);
366 if (*slot) 366 if (*slot)
367 { 367 {
368 free (vc); 368 free (vc);
369 return ((vn_constant_t)*slot)->value_id; 369 return ((vn_constant_t)*slot)->value_id;
370 } 370 }
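get_or_alloc_constant_value_id above is the usual libiberty htab find-or-insert idiom: compute the hash once, cache it in the element, probe with INSERT, and discard the scratch element if the slot was already occupied. A minimal standalone C sketch of the same idiom; the names here (struct vc, hash_int, get_or_alloc_value_id, NSLOTS) and the open-addressing table standing in for htab are invented for illustration, not GCC code:

    #include <stdio.h>
    #include <stdlib.h>

    /* Toy analogue of vn_constant_s: a constant plus its cached hash
       and the value id assigned on first insertion.  */
    struct vc { unsigned hash; int constant; unsigned value_id; };

    #define NSLOTS 64
    static struct vc *table[NSLOTS];
    static unsigned next_value_id = 1;

    static unsigned hash_int (int c) { return (unsigned) c * 2654435761u; }

    /* Find-or-insert: return the existing value id for CONSTANT, or
       allocate a fresh one, mirroring get_or_alloc_constant_value_id.  */
    static unsigned
    get_or_alloc_value_id (int constant)
    {
      unsigned h = hash_int (constant);
      unsigned i = h % NSLOTS;
      while (table[i])                      /* linear probing */
        {
          if (table[i]->hash == h && table[i]->constant == constant)
            return table[i]->value_id;      /* already known: reuse id */
          i = (i + 1) % NSLOTS;
        }
      struct vc *vc = malloc (sizeof *vc);
      vc->hash = h;
      vc->constant = constant;
      vc->value_id = next_value_id++;
      table[i] = vc;
      return vc->value_id;
    }

    int
    main (void)
    {
      printf ("%u %u %u\n",
              get_or_alloc_value_id (42),
              get_or_alloc_value_id (7),
              get_or_alloc_value_id (42));  /* prints 1 2 1 */
      return 0;
    }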
377 /* Return true if V is a value id for a constant. */ 377 /* Return true if V is a value id for a constant. */
378 378
379 bool 379 bool
380 value_id_constant_p (unsigned int v) 380 value_id_constant_p (unsigned int v)
381 { 381 {
382 return bitmap_bit_p (constant_value_ids, v); 382 return bitmap_bit_p (constant_value_ids, v);
383 } 383 }
384 384
385 /* Compare two reference operands P1 and P2 for equality. Return true if 385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */ 386 they are equal, and false otherwise. */
387 387
425 /* Compute a hash for the reference operation VR1 and return it. */ 425 /* Compute a hash for the reference operation VR1 and return it. */
426 426
427 hashval_t 427 hashval_t
428 vn_reference_compute_hash (const vn_reference_t vr1) 428 vn_reference_compute_hash (const vn_reference_t vr1)
429 { 429 {
430 hashval_t result = 0; 430 hashval_t result;
431 tree v;
432 int i; 431 int i;
433 vn_reference_op_t vro; 432 vn_reference_op_t vro;
434 433
435 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++) 434 result = iterative_hash_expr (vr1->vuse, 0);
436 result += iterative_hash_expr (v, 0);
437 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++) 435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
438 result += vn_reference_op_compute_hash (vro); 436 result += vn_reference_op_compute_hash (vro);
439 437
440 return result; 438 return result;
441 } 439 }
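The rewritten hash seeds the result with iterative_hash_expr over the single VUSE and then adds one hash per reference operand. Addition keeps the operand contribution cheap, and equal references still collide on purpose because vn_reference_eq only accepts operand vectors of identical length and order. A standalone sketch of that shape; mix and ref_hash are toy stand-ins, not GCC functions:

    #include <stdio.h>

    /* Toy combiner standing in for iterative_hash_expr.  */
    static unsigned
    mix (unsigned h, unsigned v)
    {
      return h ^ (v + 0x9e3779b9u + (h << 6) + (h >> 2));
    }

    /* Model of vn_reference_compute_hash: seed with the (single) VUSE,
       then fold in one hash per reference operand.  */
    static unsigned
    ref_hash (unsigned vuse, const unsigned *ops, int n)
    {
      unsigned result = mix (0, vuse);   /* iterative_hash_expr (vuse, 0) */
      for (int i = 0; i < n; i++)
        result += ops[i];                /* vn_reference_op_compute_hash */
      return result;
    }

    int
    main (void)
    {
      unsigned ops[] = { 11, 22, 33 };
      printf ("%#x\n", ref_hash (5, ops, 3));
      return 0;
    }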
444 means they have the same set of operands and vuses. */ 442 means they have the same set of operands and vuses. */
445 443
446 int 444 int
447 vn_reference_eq (const void *p1, const void *p2) 445 vn_reference_eq (const void *p1, const void *p2)
448 { 446 {
449 tree v;
450 int i; 447 int i;
451 vn_reference_op_t vro; 448 vn_reference_op_t vro;
452 449
453 const_vn_reference_t const vr1 = (const_vn_reference_t) p1; 450 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
454 const_vn_reference_t const vr2 = (const_vn_reference_t) p2; 451 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
455 if (vr1->hashcode != vr2->hashcode) 452 if (vr1->hashcode != vr2->hashcode)
456 return false; 453 return false;
457 454
458 if (vr1->vuses == vr2->vuses 455 /* Early out if this is not a hash collision. */
459 && vr1->operands == vr2->operands) 456 if (vr1->hashcode != vr2->hashcode)
457 return false;
458
459 /* The VOP needs to be the same. */
460 if (vr1->vuse != vr2->vuse)
461 return false;
462
463 /* If the operands are the same we are done. */
464 if (vr1->operands == vr2->operands)
460 return true; 465 return true;
461
462 /* Impossible for them to be equivalent if they have different
463 number of vuses. */
464 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
465 return false;
466 466
467 /* We require that address operands be canonicalized in a way that 467 /* We require that address operands be canonicalized in a way that
468 two memory references will have the same operands if they are 468 two memory references will have the same operands if they are
469 equivalent. */ 469 equivalent. */
470 if (VEC_length (vn_reference_op_s, vr1->operands) 470 if (VEC_length (vn_reference_op_s, vr1->operands)
471 != VEC_length (vn_reference_op_s, vr2->operands)) 471 != VEC_length (vn_reference_op_s, vr2->operands))
472 return false; 472 return false;
473 473
474 /* The memory state is more often different than the address of the
475 store/load, so check it first. */
476 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
477 {
478 if (VEC_index (tree, vr2->vuses, i) != v)
479 return false;
480 }
481
482 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++) 474 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
483 { 475 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
484 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i), 476 vro))
485 vro)) 477 return false;
486 return false; 478
487 }
488 return true; 479 return true;
489 }
490
491 /* Place the vuses from STMT into *result. */
492
493 static inline void
494 vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
495 {
496 ssa_op_iter iter;
497 tree vuse;
498
499 if (!stmt)
500 return;
501
502 VEC_reserve_exact (tree, gc, *result,
503 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
504
505 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
506 VEC_quick_push (tree, *result, vuse);
507 }
508
509
510 /* Copy the VUSE names in STMT into a vector, and return
511 the vector. */
512
513 static VEC (tree, gc) *
514 copy_vuses_from_stmt (gimple stmt)
515 {
516 VEC (tree, gc) *vuses = NULL;
517
518 vuses_to_vec (stmt, &vuses);
519
520 return vuses;
521 }
522
523 /* Place the vdefs from STMT into *result. */
524
525 static inline void
526 vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
527 {
528 ssa_op_iter iter;
529 tree vdef;
530
531 if (!stmt)
532 return;
533
534 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
535
536 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
537 VEC_quick_push (tree, *result, vdef);
538 }
539
540 /* Copy the names of vdef results in STMT into a vector, and return
541 the vector. */
542
543 static VEC (tree, gc) *
544 copy_vdefs_from_stmt (gimple stmt)
545 {
546 VEC (tree, gc) *vdefs = NULL;
547
548 vdefs_to_vec (stmt, &vdefs);
549
550 return vdefs;
551 }
552
553 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
554 static VEC (tree, gc) *shared_lookup_vops;
555
556 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
557 This function will overwrite the current SHARED_LOOKUP_VOPS
558 variable. */
559
560 VEC (tree, gc) *
561 shared_vuses_from_stmt (gimple stmt)
562 {
563 VEC_truncate (tree, shared_lookup_vops, 0);
564 vuses_to_vec (stmt, &shared_lookup_vops);
565
566 return shared_lookup_vops;
567 } 480 }
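The new vn_reference_eq is a ladder from cheap to expensive tests: cached hashcode first, then the single virtual operand (GCC 4.5's alias framework tracks one VUSE per statement, which is why the VUSE-vector helpers on the left are deleted outright), then pointer-equal operand vectors, then the deep per-operand compare. A toy standalone model of the ladder; struct ref and ref_eq are illustrative only:

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    /* Toy vn_reference: cached hash, one virtual operand, operand list.  */
    struct ref { unsigned hashcode; int vuse; int nops; int ops[8]; };

    /* Model of vn_reference_eq: cheap checks first, deep compare last.  */
    static bool
    ref_eq (const struct ref *a, const struct ref *b)
    {
      if (a->hashcode != b->hashcode)   /* early out: not a hash collision */
        return false;
      if (a->vuse != b->vuse)           /* the VOP must match */
        return false;
      if (a->nops != b->nops)
        return false;
      return memcmp (a->ops, b->ops, a->nops * sizeof a->ops[0]) == 0;
    }

    int
    main (void)
    {
      struct ref r1 = { 0xbeef, 3, 2, { 1, 2 } };
      struct ref r2 = { 0xbeef, 3, 2, { 1, 2 } };
      struct ref r3 = { 0xbeef, 4, 2, { 1, 2 } };
      printf ("%d %d\n", ref_eq (&r1, &r2), ref_eq (&r1, &r3)); /* 1 0 */
      return 0;
    }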
568 481
569 /* Copy the operations present in load/store REF into RESULT, a vector of 482 /* Copy the operations present in load/store REF into RESULT, a vector of
570 vn_reference_op_s's. */ 483 vn_reference_op_s's. */
571 484
573 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result) 486 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
574 { 487 {
575 if (TREE_CODE (ref) == TARGET_MEM_REF) 488 if (TREE_CODE (ref) == TARGET_MEM_REF)
576 { 489 {
577 vn_reference_op_s temp; 490 vn_reference_op_s temp;
491 tree base;
492
493 base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
494 if (!base)
495 base = build_int_cst (ptr_type_node, 0);
578 496
579 memset (&temp, 0, sizeof (temp)); 497 memset (&temp, 0, sizeof (temp));
580 /* We do not care for spurious type qualifications. */ 498 /* We do not care for spurious type qualifications. */
581 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref)); 499 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
582 temp.opcode = TREE_CODE (ref); 500 temp.opcode = TREE_CODE (ref);
583 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref); 501 temp.op0 = TMR_INDEX (ref);
584 temp.op1 = TMR_INDEX (ref); 502 temp.op1 = TMR_STEP (ref);
503 temp.op2 = TMR_OFFSET (ref);
585 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 504 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
586 505
587 memset (&temp, 0, sizeof (temp)); 506 memset (&temp, 0, sizeof (temp));
588 temp.type = NULL_TREE; 507 temp.type = NULL_TREE;
589 temp.opcode = TREE_CODE (ref); 508 temp.opcode = TREE_CODE (base);
590 temp.op0 = TMR_STEP (ref); 509 temp.op0 = base;
591 temp.op1 = TMR_OFFSET (ref); 510 temp.op1 = TMR_ORIGINAL (ref);
592 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 511 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
593 return; 512 return;
594 } 513 }
595 514
596 /* For non-calls, store the information that makes up the address. */ 515 /* For non-calls, store the information that makes up the address. */
622 case COMPONENT_REF: 541 case COMPONENT_REF:
623 /* The field decl is enough to unambiguously specify the field, 542 /* The field decl is enough to unambiguously specify the field,
624 a matching type is not necessary and a mismatching type 543 a matching type is not necessary and a mismatching type
625 is always a spurious difference. */ 544 is always a spurious difference. */
626 temp.type = NULL_TREE; 545 temp.type = NULL_TREE;
546 temp.op0 = TREE_OPERAND (ref, 1);
547 temp.op1 = TREE_OPERAND (ref, 2);
627 /* If this is a reference to a union member, record the union 548 /* If this is a reference to a union member, record the union
628 member size as operand. Do so only if we are doing 549 member size as operand. Do so only if we are doing
629 expression insertion (during FRE), as PRE currently gets 550 expression insertion (during FRE), as PRE currently gets
630 confused with this. */ 551 confused with this. */
631 if (may_insert 552 if (may_insert
632 && TREE_OPERAND (ref, 2) == NULL_TREE 553 && temp.op1 == NULL_TREE
633 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE 554 && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
634 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1))) 555 && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
635 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1)))) 556 && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
636 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1))); 557 && host_integerp (DECL_SIZE (temp.op0), 0))
637 else 558 temp.op0 = DECL_SIZE (temp.op0);
638 {
639 /* Record field as operand. */
640 temp.op0 = TREE_OPERAND (ref, 1);
641 temp.op1 = TREE_OPERAND (ref, 2);
642 }
643 break; 559 break;
644 case ARRAY_RANGE_REF: 560 case ARRAY_RANGE_REF:
645 case ARRAY_REF: 561 case ARRAY_REF:
646 /* Record index as operand. */ 562 /* Record index as operand. */
647 temp.op0 = TREE_OPERAND (ref, 1); 563 temp.op0 = TREE_OPERAND (ref, 1);
648 temp.op1 = TREE_OPERAND (ref, 2); 564 /* Always record lower bounds and element size. */
649 temp.op2 = TREE_OPERAND (ref, 3); 565 temp.op1 = array_ref_low_bound (ref);
566 temp.op2 = array_ref_element_size (ref);
650 break; 567 break;
651 case STRING_CST: 568 case STRING_CST:
652 case INTEGER_CST: 569 case INTEGER_CST:
653 case COMPLEX_CST: 570 case COMPLEX_CST:
654 case VECTOR_CST: 571 case VECTOR_CST:
689 else 606 else
690 ref = NULL_TREE; 607 ref = NULL_TREE;
691 } 608 }
692 } 609 }
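copy_reference_ops_from_ref flattens a reference tree into a vector of vn_reference_op_s records, outermost component first, ending at the base object; the 4.5 version additionally records the lower bound and element size for ARRAY_REFs so the recorded ops are self-describing. A toy picture of the flattened form for a hypothetical access ((struct s *)p)->field[i]; struct op and the strings are illustrative only:

    #include <stdio.h>

    /* Toy vn_reference_op_s: just an opcode name and one operand.  */
    struct op { const char *opcode; const char *operand; };

    int
    main (void)
    {
      /* Flattened form, outermost first, roughly as
         copy_reference_ops_from_ref would record it.  */
      struct op ops[] = {
        { "ARRAY_REF",     "i"     },  /* index, plus low bound/elt size */
        { "COMPONENT_REF", "field" },  /* the FIELD_DECL */
        { "INDIRECT_REF",  ""      },  /* the dereference */
        { "SSA_NAME",      "p"     },  /* the base pointer */
      };
      for (unsigned i = 0; i < sizeof ops / sizeof ops[0]; i++)
        printf ("%-14s %s\n", ops[i].opcode, ops[i].operand);
      return 0;
    }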
693 610
694 /* Re-create a reference tree from the reference ops OPS. 611 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
695 Returns NULL_TREE if the ops were not handled. 612 operands in *OPS, the reference alias set SET and the reference type TYPE.
696 This routine needs to be kept in sync with copy_reference_ops_from_ref. */ 613 Return true if something useful was produced. */
697 614
698 static tree 615 bool
699 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops) 616 ao_ref_init_from_vn_reference (ao_ref *ref,
617 alias_set_type set, tree type,
618 VEC (vn_reference_op_s, heap) *ops)
700 { 619 {
701 vn_reference_op_t op; 620 vn_reference_op_t op;
702 unsigned i; 621 unsigned i;
703 tree ref, *op0_p = &ref; 622 tree base = NULL_TREE;
704 623 tree *op0_p = &base;
624 HOST_WIDE_INT offset = 0;
625 HOST_WIDE_INT max_size;
626 HOST_WIDE_INT size = -1;
627 tree size_tree = NULL_TREE;
628
629 /* First get the final access size from just the outermost expression. */
630 op = VEC_index (vn_reference_op_s, ops, 0);
631 if (op->opcode == COMPONENT_REF)
632 {
633 if (TREE_CODE (op->op0) == INTEGER_CST)
634 size_tree = op->op0;
635 else
636 size_tree = DECL_SIZE (op->op0);
637 }
638 else if (op->opcode == BIT_FIELD_REF)
639 size_tree = op->op0;
640 else
641 {
642 enum machine_mode mode = TYPE_MODE (type);
643 if (mode == BLKmode)
644 size_tree = TYPE_SIZE (type);
645 else
646 size = GET_MODE_BITSIZE (mode);
647 }
648 if (size_tree != NULL_TREE)
649 {
650 if (!host_integerp (size_tree, 1))
651 size = -1;
652 else
653 size = TREE_INT_CST_LOW (size_tree);
654 }
655
656 /* Initially, maxsize is the same as the accessed element size.
657 In the following it will only grow (or become -1). */
658 max_size = size;
659
660 /* Compute cumulative bit-offset for nested component-refs and array-refs,
661 and find the ultimate containing object. */
705 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i) 662 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
706 { 663 {
707 switch (op->opcode) 664 switch (op->opcode)
708 { 665 {
666 /* These may be in the reference ops, but we cannot do anything
667 sensible with them here. */
709 case CALL_EXPR: 668 case CALL_EXPR:
710 return NULL_TREE; 669 case ADDR_EXPR:
711 670 return false;
671
672 /* Record the base objects. */
712 case ALIGN_INDIRECT_REF: 673 case ALIGN_INDIRECT_REF:
713 case INDIRECT_REF: 674 case INDIRECT_REF:
714 *op0_p = build1 (op->opcode, op->type, NULL_TREE); 675 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
715 op0_p = &TREE_OPERAND (*op0_p, 0); 676 op0_p = &TREE_OPERAND (*op0_p, 0);
716 break; 677 break;
719 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type, 680 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
720 NULL_TREE, op->op0); 681 NULL_TREE, op->op0);
721 op0_p = &TREE_OPERAND (*op0_p, 0); 682 op0_p = &TREE_OPERAND (*op0_p, 0);
722 break; 683 break;
723 684
685 case VAR_DECL:
686 case PARM_DECL:
687 case RESULT_DECL:
688 case SSA_NAME:
689 *op0_p = op->op0;
690 break;
691
692 /* And now the usual component-reference style ops. */
724 case BIT_FIELD_REF: 693 case BIT_FIELD_REF:
725 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE, 694 offset += tree_low_cst (op->op1, 0);
726 op->op0, op->op1);
727 op0_p = &TREE_OPERAND (*op0_p, 0);
728 break; 695 break;
729 696
730 case COMPONENT_REF: 697 case COMPONENT_REF:
731 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE, 698 {
732 op->op0, op->op1); 699 tree field = op->op0;
733 op0_p = &TREE_OPERAND (*op0_p, 0); 700 /* We do not have a complete COMPONENT_REF tree here so we
734 break; 701 cannot use component_ref_field_offset. Do the interesting
702 parts manually. */
703
704 /* Our union trick, done for offset zero only. */
705 if (TREE_CODE (field) == INTEGER_CST)
706 ;
707 else if (op->op1
708 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
709 max_size = -1;
710 else
711 {
712 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
713 * BITS_PER_UNIT);
714 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
715 }
716 break;
717 }
735 718
736 case ARRAY_RANGE_REF: 719 case ARRAY_RANGE_REF:
737 case ARRAY_REF: 720 case ARRAY_REF:
738 *op0_p = build4 (op->opcode, op->type, NULL_TREE, 721 /* We recorded the lower bound and the element size. */
739 op->op0, op->op1, op->op2); 722 if (!host_integerp (op->op0, 0)
740 op0_p = &TREE_OPERAND (*op0_p, 0); 723 || !host_integerp (op->op1, 0)
724 || !host_integerp (op->op2, 0))
725 max_size = -1;
726 else
727 {
728 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
729 hindex -= TREE_INT_CST_LOW (op->op1);
730 hindex *= TREE_INT_CST_LOW (op->op2);
731 hindex *= BITS_PER_UNIT;
732 offset += hindex;
733 }
734 break;
735
736 case REALPART_EXPR:
737 break;
738
739 case IMAGPART_EXPR:
740 offset += size;
741 break;
742
743 case VIEW_CONVERT_EXPR:
741 break; 744 break;
742 745
743 case STRING_CST: 746 case STRING_CST:
744 case INTEGER_CST: 747 case INTEGER_CST:
745 case COMPLEX_CST: 748 case COMPLEX_CST:
746 case VECTOR_CST: 749 case VECTOR_CST:
747 case REAL_CST: 750 case REAL_CST:
748 case CONSTRUCTOR: 751 case CONSTRUCTOR:
749 case VAR_DECL:
750 case PARM_DECL:
751 case CONST_DECL: 752 case CONST_DECL:
752 case RESULT_DECL: 753 return false;
753 case SSA_NAME:
754 *op0_p = op->op0;
755 break;
756
757 case ADDR_EXPR:
758 if (op->op0 != NULL_TREE)
759 {
760 gcc_assert (is_gimple_min_invariant (op->op0));
761 *op0_p = op->op0;
762 break;
763 }
764 /* Fallthrough. */
765 case IMAGPART_EXPR:
766 case REALPART_EXPR:
767 case VIEW_CONVERT_EXPR:
768 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
769 op0_p = &TREE_OPERAND (*op0_p, 0);
770 break;
771 754
772 default: 755 default:
773 return NULL_TREE; 756 return false;
774 } 757 }
775 } 758 }
776 759
777 return ref; 760 if (base == NULL_TREE)
761 return false;
762
763 ref->ref = NULL_TREE;
764 ref->base = base;
765 ref->offset = offset;
766 ref->size = size;
767 ref->max_size = max_size;
768 ref->ref_alias_set = set;
769 ref->base_alias_set = -1;
770
771 return true;
778 } 772 }
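ao_ref_init_from_vn_reference accumulates a conservative bit offset over the component ops: a COMPONENT_REF adds DECL_FIELD_OFFSET * BITS_PER_UNIT plus DECL_FIELD_BIT_OFFSET, an ARRAY_REF adds (index - low bound) * element size * BITS_PER_UNIT, and max_size drops to -1 whenever a term is not a known integer. A standalone arithmetic sketch with a made-up struct layout (all the constants below are assumptions for the example):

    #include <stdio.h>

    #define BITS_PER_UNIT 8

    /* Accumulate the bit offset of s.a[3].f where (made-up layout):
       - array a starts at byte 4 of s,
       - each element is 8 bytes, the array's low bound is 1,
       - field f sits at byte 2, bit 0 within an element.  */
    int
    main (void)
    {
      long offset = 0;

      /* COMPONENT_REF a: byte offset 4, bit offset 0.  */
      offset += 4 * BITS_PER_UNIT + 0;

      /* ARRAY_REF [3]: (index - low_bound) * element_size, in bits.  */
      offset += (3 - 1) * 8 * BITS_PER_UNIT;

      /* COMPONENT_REF f: byte offset 2, bit offset 0.  */
      offset += 2 * BITS_PER_UNIT + 0;

      printf ("bit offset = %ld (byte %ld)\n", offset, offset / 8);
      return 0;
    }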
779 773
780 /* Copy the operations present in load/store/call REF into RESULT, a vector of 774 /* Copy the operations present in load/store/call REF into RESULT, a vector of
781 vn_reference_op_s's. */ 775 vn_reference_op_s's. */
782 776
826 820
827 copy_reference_ops_from_call (call, &result); 821 copy_reference_ops_from_call (call, &result);
828 return result; 822 return result;
829 } 823 }
830 824
831 static VEC(vn_reference_op_s, heap) *shared_lookup_references; 825 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
832 826 *I_P to point to the last element of the replacement. */
833 /* Create a vector of vn_reference_op_s structures from REF, a 827 void
834 REFERENCE_CLASS_P tree. The vector is shared among all callers of 828 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
835 this function. */ 829 unsigned int *i_p)
836 830 {
837 static VEC(vn_reference_op_s, heap) * 831 VEC(vn_reference_op_s, heap) *mem = NULL;
838 shared_reference_ops_from_ref (tree ref) 832 vn_reference_op_t op;
839 { 833 unsigned int i = *i_p;
840 if (!ref) 834 unsigned int j;
841 return NULL; 835
842 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0); 836 /* Get ops for the addressed object. */
843 copy_reference_ops_from_ref (ref, &shared_lookup_references); 837 op = VEC_index (vn_reference_op_s, *ops, i);
844 return shared_lookup_references; 838 /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
845 } 839 around it to avoid later ICEs. */
846 840 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
847 /* Create a vector of vn_reference_op_s structures from CALL, a 841 && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
848 call statement. The vector is shared among all callers of 842 {
849 this function. */ 843 vn_reference_op_s aref;
850 844 tree dom;
851 static VEC(vn_reference_op_s, heap) * 845 aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
852 shared_reference_ops_from_call (gimple call) 846 aref.opcode = ARRAY_REF;
853 { 847 aref.op0 = integer_zero_node;
854 if (!call) 848 if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
855 return NULL; 849 && TYPE_MIN_VALUE (dom))
856 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0); 850 aref.op0 = TYPE_MIN_VALUE (dom);
857 copy_reference_ops_from_call (call, &shared_lookup_references); 851 aref.op1 = aref.op0;
858 return shared_lookup_references; 852 aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
859 } 853 VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
860 854 }
855 copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
856
857 /* Do the replacement - we should have at least one op in mem now. */
858 if (VEC_length (vn_reference_op_s, mem) == 1)
859 {
860 VEC_replace (vn_reference_op_s, *ops, i - 1,
861 VEC_index (vn_reference_op_s, mem, 0));
862 VEC_ordered_remove (vn_reference_op_s, *ops, i);
863 i--;
864 }
865 else if (VEC_length (vn_reference_op_s, mem) == 2)
866 {
867 VEC_replace (vn_reference_op_s, *ops, i - 1,
868 VEC_index (vn_reference_op_s, mem, 0));
869 VEC_replace (vn_reference_op_s, *ops, i,
870 VEC_index (vn_reference_op_s, mem, 1));
871 }
872 else if (VEC_length (vn_reference_op_s, mem) > 2)
873 {
874 VEC_replace (vn_reference_op_s, *ops, i - 1,
875 VEC_index (vn_reference_op_s, mem, 0));
876 VEC_replace (vn_reference_op_s, *ops, i,
877 VEC_index (vn_reference_op_s, mem, 1));
878 /* ??? There is no VEC_splice. */
879 for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
880 VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
881 }
882 else
883 gcc_unreachable ();
884
885 VEC_free (vn_reference_op_s, heap, mem);
886 *i_p = i;
887 }
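vn_reference_fold_indirect rewrites the operand pair for *&obj (an INDIRECT_REF followed by an ADDR_EXPR) into the operands of the addressed object itself, shrinking, replacing in place, or splicing depending on how many ops the object expands to. A toy splice for the one-entry case; the opcode strings and the NULL-terminated arrays are illustrative only:

    #include <stdio.h>

    int
    main (void)
    {
      /* Operand vector for *&x.f, outermost first (toy opcodes).  */
      const char *ops[8] = { "COMPONENT_REF f", "INDIRECT_REF",
                             "ADDR_EXPR x", NULL };
      /* Ops for the addressed object x alone.  */
      const char *mem[8] = { "VAR_DECL x", NULL };

      /* Fold *& at positions 1..2: replace the two entries with mem[].
         vn_reference_fold_indirect handles the 1-, 2- and n-entry
         cases separately; here mem has one entry, so the vector shrinks.  */
      ops[1] = mem[0];
      ops[2] = NULL;                    /* ordered remove of entry 2 */

      for (int i = 0; ops[i]; i++)
        printf ("%s\n", ops[i]);        /* COMPONENT_REF f, VAR_DECL x */
      return 0;
    }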
861 888
862 /* Transform any SSA_NAME's in a vector of vn_reference_op_s 889 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
863 structures into their value numbers. This is done in-place, and 890 structures into their value numbers. This is done in-place, and
864 the vector passed in is returned. */ 891 the vector passed in is returned. */
865 892
866 static VEC (vn_reference_op_s, heap) * 893 static VEC (vn_reference_op_s, heap) *
867 valueize_refs (VEC (vn_reference_op_s, heap) *orig) 894 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
868 { 895 {
869 vn_reference_op_t vro; 896 vn_reference_op_t vro;
870 int i; 897 unsigned int i;
871 898
872 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++) 899 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
873 { 900 {
874 if (vro->opcode == SSA_NAME 901 if (vro->opcode == SSA_NAME
875 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)) 902 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
877 vro->op0 = SSA_VAL (vro->op0); 904 vro->op0 = SSA_VAL (vro->op0);
878 /* If it transforms from an SSA_NAME to a constant, update 905 /* If it transforms from an SSA_NAME to a constant, update
879 the opcode. */ 906 the opcode. */
880 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME) 907 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
881 vro->opcode = TREE_CODE (vro->op0); 908 vro->opcode = TREE_CODE (vro->op0);
909 /* If it transforms from an SSA_NAME to an address, fold with
910 a preceding indirect reference. */
911 if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
912 && VEC_index (vn_reference_op_s,
913 orig, i - 1)->opcode == INDIRECT_REF)
914 {
915 vn_reference_fold_indirect (&orig, &i);
916 continue;
917 }
882 } 918 }
883 /* TODO: Do we want to valueize op2 and op1 of 919 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
884 ARRAY_REF/COMPONENT_REF for Ada */ 920 vro->op1 = SSA_VAL (vro->op1);
885 921 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
922 vro->op2 = SSA_VAL (vro->op2);
886 } 923 }
887 924
888 return orig; 925 return orig;
889 } 926 }
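valueize_refs canonicalizes the vector in place by replacing every SSA operand with its current value number via SSA_VAL; the 4.5 version also valueizes op1 and op2 (the new lower-bound and element-size slots) and folds *& chains as ADDR_EXPRs appear. A toy model in which a plain array plays the role of the SSA_VAL map; everything here is invented for the example:

    #include <stdio.h>

    /* Toy SSA_VAL: value number for each SSA name (index = version).  */
    static int ssa_val[] = { 0, 1, 1, 3 };   /* names 1 and 2 share value 1 */

    int
    main (void)
    {
      int ops[] = { 2, 3, 2 };               /* operands that are SSA names */
      int n = sizeof ops / sizeof ops[0];

      /* In-place valueization, as valueize_refs does for op0/op1/op2.  */
      for (int i = 0; i < n; i++)
        ops[i] = ssa_val[ops[i]];

      for (int i = 0; i < n; i++)
        printf ("%d ", ops[i]);              /* 1 3 1 */
      printf ("\n");
      return 0;
    }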
890 927
891 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into 928 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
892 their value numbers. This is done in-place, and the vector passed 929
893 in is returned. */ 930 /* Create a vector of vn_reference_op_s structures from REF, a
894 931 REFERENCE_CLASS_P tree. The vector is shared among all callers of
895 static VEC (tree, gc) * 932 this function. */
896 valueize_vuses (VEC (tree, gc) *orig) 933
897 { 934 static VEC(vn_reference_op_s, heap) *
898 bool made_replacement = false; 935 valueize_shared_reference_ops_from_ref (tree ref)
899 tree vuse; 936 {
900 int i; 937 if (!ref)
901 938 return NULL;
902 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++) 939 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
903 { 940 copy_reference_ops_from_ref (ref, &shared_lookup_references);
904 if (vuse != SSA_VAL (vuse)) 941 shared_lookup_references = valueize_refs (shared_lookup_references);
905 { 942 return shared_lookup_references;
906 made_replacement = true; 943 }
907 VEC_replace (tree, orig, i, SSA_VAL (vuse)); 944
908 } 945 /* Create a vector of vn_reference_op_s structures from CALL, a
909 } 946 call statement. The vector is shared among all callers of
910 947 this function. */
911 if (made_replacement && VEC_length (tree, orig) > 1) 948
912 sort_vuses (orig); 949 static VEC(vn_reference_op_s, heap) *
913 950 valueize_shared_reference_ops_from_call (gimple call)
914 return orig; 951 {
915 } 952 if (!call)
916 953 return NULL;
917 /* Return the single reference statement defining all virtual uses 954 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
918 in VUSES or NULL_TREE, if there are multiple defining statements. 955 copy_reference_ops_from_call (call, &shared_lookup_references);
919 Take into account only definitions that alias REF if following 956 shared_lookup_references = valueize_refs (shared_lookup_references);
920 back-edges. */ 957 return shared_lookup_references;
921
922 static gimple
923 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
924 {
925 gimple def_stmt;
926 tree vuse;
927 unsigned int i;
928
929 gcc_assert (VEC_length (tree, vuses) >= 1);
930
931 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
932 if (gimple_code (def_stmt) == GIMPLE_PHI)
933 {
934 /* We can only handle lookups over PHI nodes for a single
935 virtual operand. */
936 if (VEC_length (tree, vuses) == 1)
937 {
938 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
939 goto cont;
940 }
941 else
942 return NULL;
943 }
944
945 /* Verify each VUSE reaches the same defining stmt. */
946 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
947 {
948 gimple tmp = SSA_NAME_DEF_STMT (vuse);
949 if (tmp != def_stmt)
950 return NULL;
951 }
952
953 /* Now see if the definition aliases ref, and loop until it does. */
954 cont:
955 while (def_stmt
956 && is_gimple_assign (def_stmt)
957 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
958 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
959
960 return def_stmt;
961 } 958 }
962 959
963 /* Lookup a SCCVN reference operation VR in the current hash table. 960 /* Lookup a SCCVN reference operation VR in the current hash table.
964 Returns the resulting value number if it exists in the hash table, 961 Returns the resulting value number if it exists in the hash table,
965 NULL_TREE otherwise. VNRESULT will be filled in with the actual 962 NULL_TREE otherwise. VNRESULT will be filled in with the actual
981 { 978 {
982 if (vnresult) 979 if (vnresult)
983 *vnresult = (vn_reference_t)*slot; 980 *vnresult = (vn_reference_t)*slot;
984 return ((vn_reference_t)*slot)->result; 981 return ((vn_reference_t)*slot)->result;
985 } 982 }
986 983
987 return NULL_TREE; 984 return NULL_TREE;
988 } 985 }
989 986
987 static tree *last_vuse_ptr;
988
989 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
990 with the current VUSE and performs the expression lookup. */
991
992 static void *
993 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
994 {
995 vn_reference_t vr = (vn_reference_t)vr_;
996 void **slot;
997 hashval_t hash;
998
999 if (last_vuse_ptr)
1000 *last_vuse_ptr = vuse;
1001
1002 /* Fixup vuse and hash. */
1003 vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
1004 vr->vuse = SSA_VAL (vuse);
1005 vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
1006
1007 hash = vr->hashcode;
1008 slot = htab_find_slot_with_hash (current_info->references, vr,
1009 hash, NO_INSERT);
1010 if (!slot && current_info == optimistic_info)
1011 slot = htab_find_slot_with_hash (valid_info->references, vr,
1012 hash, NO_INSERT);
1013 if (slot)
1014 return *slot;
1015
1016 return NULL;
1017 }
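vn_reference_lookup_2 and vn_reference_lookup_3 are the two callbacks handed to walk_non_aliased_vuses: at each VUSE on the virtual use-def chain the walker first tries the plain hash lookup, then lets the second callback translate through a may-def, where (void *)-1 means give up and NULL means keep walking. A generic standalone sketch of that contract; walk_vuses, my_lookup, my_translate and the step rule (vuse - 1) are all invented for the toy:

    #include <stdio.h>

    #define GIVE_UP ((void *) -1)

    typedef void *(*lookup_fn) (int vuse, void *data);
    typedef void *(*translate_fn) (int vuse, void *data);

    /* Sketch of the walker contract: at each step try a direct lookup,
       then let the translate callback either produce a result, bail out
       (GIVE_UP), or return NULL to keep walking up the chain.  */
    static void *
    walk_vuses (int vuse, lookup_fn lookup, translate_fn translate, void *data)
    {
      while (vuse > 0)
        {
          void *res = lookup (vuse, data);
          if (res)
            return res;
          res = translate (vuse, data);
          if (res == GIVE_UP)
            return NULL;
          if (res)
            return res;
          vuse = vuse - 1;              /* step to the next def in chain */
        }
      return NULL;
    }

    static void *
    my_lookup (int vuse, void *data)
    {
      (void) data;
      return vuse == 2 ? "hit at vuse 2" : NULL;  /* pretend table hit */
    }

    static void *
    my_translate (int vuse, void *data)
    {
      (void) vuse; (void) data;
      return NULL;                      /* never translate, keep walking */
    }

    int
    main (void)
    {
      printf ("%s\n", (char *) walk_vuses (5, my_lookup, my_translate, NULL));
      return 0;
    }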
1018
1019 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1020 from the statement defining VUSE and if not successful tries to
1021 translate *REFP and VR_ through an aggregate copy at the definition
1022 of VUSE. */
1023
1024 static void *
1025 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1026 {
1027 vn_reference_t vr = (vn_reference_t)vr_;
1028 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1029 tree fndecl;
1030 tree base;
1031 HOST_WIDE_INT offset, maxsize;
1032
1033 base = ao_ref_base (ref);
1034 offset = ref->offset;
1035 maxsize = ref->max_size;
1036
1037 /* If we cannot constrain the size of the reference we cannot
1038 test if anything kills it. */
1039 if (maxsize == -1)
1040 return (void *)-1;
1041
1042 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1043 from that definition.
1044 1) Memset. */
1045 if (is_gimple_reg_type (vr->type)
1046 && is_gimple_call (def_stmt)
1047 && (fndecl = gimple_call_fndecl (def_stmt))
1048 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1049 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1050 && integer_zerop (gimple_call_arg (def_stmt, 1))
1051 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1052 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1053 {
1054 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1055 tree base2;
1056 HOST_WIDE_INT offset2, size2, maxsize2;
1057 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1058 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1059 if ((unsigned HOST_WIDE_INT)size2 / 8
1060 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1061 && operand_equal_p (base, base2, 0)
1062 && offset2 <= offset
1063 && offset2 + size2 >= offset + maxsize)
1064 {
1065 tree val = fold_convert (vr->type, integer_zero_node);
1066 unsigned int value_id = get_or_alloc_constant_value_id (val);
1067 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1068 VEC_copy (vn_reference_op_s,
1069 heap, vr->operands),
1070 val, value_id);
1071 }
1072 }
1073
1074 /* 2) Assignment from an empty CONSTRUCTOR. */
1075 else if (is_gimple_reg_type (vr->type)
1076 && gimple_assign_single_p (def_stmt)
1077 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1078 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1079 {
1080 tree base2;
1081 HOST_WIDE_INT offset2, size2, maxsize2;
1082 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1083 &offset2, &size2, &maxsize2);
1084 if (operand_equal_p (base, base2, 0)
1085 && offset2 <= offset
1086 && offset2 + size2 >= offset + maxsize)
1087 {
1088 tree val = fold_convert (vr->type, integer_zero_node);
1089 unsigned int value_id = get_or_alloc_constant_value_id (val);
1090 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1091 VEC_copy (vn_reference_op_s,
1092 heap, vr->operands),
1093 val, value_id);
1094 }
1095 }
1096
1097 /* For aggregate copies translate the reference through them if
1098 the copy kills ref. */
1099 else if (gimple_assign_single_p (def_stmt)
1100 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1101 || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
1102 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1103 {
1104 tree base2;
1105 HOST_WIDE_INT offset2, size2, maxsize2;
1106 int i, j;
1107 VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
1108 vn_reference_op_t vro;
1109 ao_ref r;
1110
1111 /* See if the assignment kills REF. */
1112 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1113 &offset2, &size2, &maxsize2);
1114 if (!operand_equal_p (base, base2, 0)
1115 || offset2 > offset
1116 || offset2 + size2 < offset + maxsize)
1117 return (void *)-1;
1118
1119 /* Find the common base of ref and the lhs. */
1120 copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
1121 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1122 j = VEC_length (vn_reference_op_s, lhs) - 1;
1123 while (j >= 0 && i >= 0
1124 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1125 vr->operands, i),
1126 VEC_index (vn_reference_op_s, lhs, j)))
1127 {
1128 i--;
1129 j--;
1130 }
1131
1132 VEC_free (vn_reference_op_s, heap, lhs);
1133 /* i now points to the first additional op.
1134 ??? LHS may not be completely contained in VR, one or more
1135 VIEW_CONVERT_EXPRs could be in its way. We could at least
1136 try handling outermost VIEW_CONVERT_EXPRs. */
1137 if (j != -1)
1138 return (void *)-1;
1139
1140 /* Now re-write REF to be based on the rhs of the assignment. */
1141 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1142 /* We need to pre-pend vr->operands[0..i] to rhs. */
1143 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1144 > VEC_length (vn_reference_op_s, vr->operands))
1145 {
1146 VEC (vn_reference_op_s, heap) *old = vr->operands;
1147 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1148 i + 1 + VEC_length (vn_reference_op_s, rhs));
1149 if (old == shared_lookup_references
1150 && vr->operands != old)
1151 shared_lookup_references = NULL;
1152 }
1153 else
1154 VEC_truncate (vn_reference_op_s, vr->operands,
1155 i + 1 + VEC_length (vn_reference_op_s, rhs));
1156 for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
1157 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1158 VEC_free (vn_reference_op_s, heap, rhs);
1159 vr->hashcode = vn_reference_compute_hash (vr);
1160
1161 /* Adjust *ref from the new operands. */
1162 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1163 return (void *)-1;
1164 /* This can happen with bitfields. */
1165 if (ref->size != r.size)
1166 return (void *)-1;
1167 *ref = r;
1168
1169 /* Do not update last seen VUSE after translating. */
1170 last_vuse_ptr = NULL;
1171
1172 /* Keep looking for the adjusted *REF / VR pair. */
1173 return NULL;
1174 }
1175
1176 /* Bail out and stop walking. */
1177 return (void *)-1;
1178 }
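All three derivation cases above (memset of zero, assignment from an empty CONSTRUCTOR, aggregate copy) first check that the defining store fully covers the read: same base, offset2 <= offset, and offset2 + size2 >= offset + maxsize, everything measured in bits. A minimal standalone check; store_covers_read is an invented name:

    #include <stdbool.h>
    #include <stdio.h>

    /* True if a store of SIZE2 bits at OFFSET2 fully covers a read of
       at most MAXSIZE bits at OFFSET (same base assumed), the test
       vn_reference_lookup_3 applies before deriving a value.  */
    static bool
    store_covers_read (long offset2, long size2, long offset, long maxsize)
    {
      return offset2 <= offset && offset2 + size2 >= offset + maxsize;
    }

    int
    main (void)
    {
      /* memset of 16 bytes at byte 0 vs. a 4-byte read at byte 8.  */
      printf ("%d\n", store_covers_read (0, 16 * 8, 8 * 8, 4 * 8));  /* 1 */
      /* ...vs. a 4-byte read at byte 14: partially outside.  */
      printf ("%d\n", store_covers_read (0, 16 * 8, 14 * 8, 4 * 8)); /* 0 */
      return 0;
    }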
990 1179
991 /* Lookup a reference operation by its parts, in the current hash table. 1180 /* Lookup a reference operation by its parts, in the current hash table.
992 Returns the resulting value number if it exists in the hash table, 1181 Returns the resulting value number if it exists in the hash table,
993 NULL_TREE otherwise. VNRESULT will be filled in with the actual 1182 NULL_TREE otherwise. VNRESULT will be filled in with the actual
994 vn_reference_t stored in the hashtable if something is found. */ 1183 vn_reference_t stored in the hashtable if something is found. */
995 1184
996 tree 1185 tree
997 vn_reference_lookup_pieces (VEC (tree, gc) *vuses, 1186 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
998 VEC (vn_reference_op_s, heap) *operands, 1187 VEC (vn_reference_op_s, heap) *operands,
999 vn_reference_t *vnresult, bool maywalk) 1188 vn_reference_t *vnresult, bool maywalk)
1000 { 1189 {
1001 struct vn_reference_s vr1; 1190 struct vn_reference_s vr1;
1002 tree result; 1191 vn_reference_t tmp;
1003 if (vnresult) 1192
1004 *vnresult = NULL; 1193 if (!vnresult)
1005 1194 vnresult = &tmp;
1006 vr1.vuses = valueize_vuses (vuses); 1195 *vnresult = NULL;
1007 vr1.operands = valueize_refs (operands); 1196
1197 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1198 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1199 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1200 VEC_length (vn_reference_op_s, operands));
1201 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1202 VEC_address (vn_reference_op_s, operands),
1203 sizeof (vn_reference_op_s)
1204 * VEC_length (vn_reference_op_s, operands));
1205 vr1.operands = operands = shared_lookup_references
1206 = valueize_refs (shared_lookup_references);
1207 vr1.type = type;
1208 vr1.set = set;
1008 vr1.hashcode = vn_reference_compute_hash (&vr1); 1209 vr1.hashcode = vn_reference_compute_hash (&vr1);
1009 result = vn_reference_lookup_1 (&vr1, vnresult); 1210 vn_reference_lookup_1 (&vr1, vnresult);
1010 1211
1011 /* If there is a single defining statement for all virtual uses, we can 1212 if (!*vnresult
1012 use that, following virtual use-def chains. */
1013 if (!result
1014 && maywalk 1213 && maywalk
1015 && vr1.vuses 1214 && vr1.vuse)
1016 && VEC_length (tree, vr1.vuses) >= 1) 1215 {
1017 { 1216 ao_ref r;
1018 tree ref = get_ref_from_reference_ops (operands); 1217 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1019 gimple def_stmt; 1218 *vnresult =
1020 if (ref 1219 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1021 && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses)) 1220 vn_reference_lookup_2,
1022 && is_gimple_assign (def_stmt)) 1221 vn_reference_lookup_3, &vr1);
1023 { 1222 if (vr1.operands != operands)
1024 /* We are now at an aliasing definition for the vuses we want to 1223 VEC_free (vn_reference_op_s, heap, vr1.operands);
1025 look up. Re-do the lookup with the vdefs for this stmt. */ 1224 }
1026 vdefs_to_vec (def_stmt, &vuses); 1225
1027 vr1.vuses = valueize_vuses (vuses); 1226 if (*vnresult)
1028 vr1.hashcode = vn_reference_compute_hash (&vr1); 1227 return (*vnresult)->result;
1029 result = vn_reference_lookup_1 (&vr1, vnresult); 1228
1030 } 1229 return NULL_TREE;
1031 }
1032
1033 return result;
1034 } 1230 }
1035 1231
1036 /* Lookup OP in the current hash table, and return the resulting value 1232 /* Lookup OP in the current hash table, and return the resulting value
1037 number if it exists in the hash table. Return NULL_TREE if it does 1233 number if it exists in the hash table. Return NULL_TREE if it does
1038 not exist in the hash table or if the result field of the structure 1234 not exist in the hash table or if the result field of the structure
1039 was NULL. VNRESULT will be filled in with the vn_reference_t 1235 was NULL. VNRESULT will be filled in with the vn_reference_t
1040 stored in the hashtable if one exists. */ 1236 stored in the hashtable if one exists. */
1041 1237
1042 tree 1238 tree
1043 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk, 1239 vn_reference_lookup (tree op, tree vuse, bool maywalk,
1044 vn_reference_t *vnresult) 1240 vn_reference_t *vnresult)
1045 { 1241 {
1242 VEC (vn_reference_op_s, heap) *operands;
1046 struct vn_reference_s vr1; 1243 struct vn_reference_s vr1;
1047 tree result; 1244
1048 gimple def_stmt;
1049 if (vnresult) 1245 if (vnresult)
1050 *vnresult = NULL; 1246 *vnresult = NULL;
1051 1247
1052 vr1.vuses = valueize_vuses (vuses); 1248 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1053 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op)); 1249 vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1250 vr1.type = TREE_TYPE (op);
1251 vr1.set = get_alias_set (op);
1054 vr1.hashcode = vn_reference_compute_hash (&vr1); 1252 vr1.hashcode = vn_reference_compute_hash (&vr1);
1055 result = vn_reference_lookup_1 (&vr1, vnresult); 1253
1056 1254 if (maywalk
1057 /* If there is a single defining statement for all virtual uses, we can 1255 && vr1.vuse)
1058 use that, following virtual use-def chains. */ 1256 {
1059 if (!result 1257 vn_reference_t wvnresult;
1060 && maywalk 1258 ao_ref r;
1061 && vr1.vuses 1259 ao_ref_init (&r, op);
1062 && VEC_length (tree, vr1.vuses) >= 1 1260 wvnresult =
1063 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses)) 1261 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1064 && is_gimple_assign (def_stmt)) 1262 vn_reference_lookup_2,
1065 { 1263 vn_reference_lookup_3, &vr1);
1066 /* We are now at an aliasing definition for the vuses we want to 1264 if (vr1.operands != operands)
1067 look up. Re-do the lookup with the vdefs for this stmt. */ 1265 VEC_free (vn_reference_op_s, heap, vr1.operands);
1068 vdefs_to_vec (def_stmt, &vuses); 1266 if (wvnresult)
1069 vr1.vuses = valueize_vuses (vuses); 1267 {
1070 vr1.hashcode = vn_reference_compute_hash (&vr1); 1268 if (vnresult)
1071 result = vn_reference_lookup_1 (&vr1, vnresult); 1269 *vnresult = wvnresult;
1072 } 1270 return wvnresult->result;
1073 1271 }
1074 return result; 1272
1273 return NULL_TREE;
1274 }
1275
1276 return vn_reference_lookup_1 (&vr1, vnresult);
1075 } 1277 }
1076 1278
1077 1279
1078 /* Insert OP into the current hash table with a value number of 1280 /* Insert OP into the current hash table with a value number of
1079 RESULT, and return the resulting reference structure we created. */ 1281 RESULT, and return the resulting reference structure we created. */
1080 1282
1081 vn_reference_t 1283 vn_reference_t
1082 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses) 1284 vn_reference_insert (tree op, tree result, tree vuse)
1083 { 1285 {
1084 void **slot; 1286 void **slot;
1085 vn_reference_t vr1; 1287 vn_reference_t vr1;
1086 1288
1087 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); 1289 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1088 if (TREE_CODE (result) == SSA_NAME) 1290 if (TREE_CODE (result) == SSA_NAME)
1089 vr1->value_id = VN_INFO (result)->value_id; 1291 vr1->value_id = VN_INFO (result)->value_id;
1090 else 1292 else
1091 vr1->value_id = get_or_alloc_constant_value_id (result); 1293 vr1->value_id = get_or_alloc_constant_value_id (result);
1092 vr1->vuses = valueize_vuses (vuses); 1294 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1093 vr1->operands = valueize_refs (create_reference_ops_from_ref (op)); 1295 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1296 vr1->type = TREE_TYPE (op);
1297 vr1->set = get_alias_set (op);
1094 vr1->hashcode = vn_reference_compute_hash (vr1); 1298 vr1->hashcode = vn_reference_compute_hash (vr1);
1095 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result; 1299 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1096 1300
1097 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode, 1301 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1098 INSERT); 1302 INSERT);
1116 /* Insert a reference by its pieces into the current hash table with 1320 /* Insert a reference by its pieces into the current hash table with
1117 a value number of RESULT. Return the resulting reference 1321 a value number of RESULT. Return the resulting reference
1118 structure we created. */ 1322 structure we created. */
1119 1323
1120 vn_reference_t 1324 vn_reference_t
1121 vn_reference_insert_pieces (VEC (tree, gc) *vuses, 1325 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1122 VEC (vn_reference_op_s, heap) *operands, 1326 VEC (vn_reference_op_s, heap) *operands,
1123 tree result, unsigned int value_id) 1327 tree result, unsigned int value_id)
1124 1328
1125 { 1329 {
1126 void **slot; 1330 void **slot;
1127 vn_reference_t vr1; 1331 vn_reference_t vr1;
1128 1332
1129 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); 1333 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1130 vr1->value_id = value_id; 1334 vr1->value_id = value_id;
1131 vr1->vuses = valueize_vuses (vuses); 1335 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1132 vr1->operands = valueize_refs (operands); 1336 vr1->operands = valueize_refs (operands);
1337 vr1->type = type;
1338 vr1->set = set;
1133 vr1->hashcode = vn_reference_compute_hash (vr1); 1339 vr1->hashcode = vn_reference_compute_hash (vr1);
1134 if (result && TREE_CODE (result) == SSA_NAME) 1340 if (result && TREE_CODE (result) == SSA_NAME)
1135 result = SSA_VAL (result); 1341 result = SSA_VAL (result);
1136 vr1->result = result; 1342 vr1->result = result;
1137 1343
1138 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode, 1344 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1139 INSERT); 1345 INSERT);
1140 1346
1141 /* At this point we should have all the things inserted that we have 1347 /* At this point we should have all the things inserted that we have
1142 seen before, and we should never try inserting something that 1348 seen before, and we should never try inserting something that
1143 already exists. */ 1349 already exists. */
1144 gcc_assert (!*slot); 1350 gcc_assert (!*slot);
1145 if (*slot) 1351 if (*slot)
1146 free_reference (*slot); 1352 free_reference (*slot);
1147 1353
1148 *slot = vr1; 1354 *slot = vr1;
1149 return vr1; 1355 return vr1;
1150 } 1356 }
1151 1357
1152 /* Compute and return the hash value for nary operation VBO1. */ 1358 /* Compute and return the hash value for nary operation VBO1. */
1153 1359
1154 inline hashval_t 1360 hashval_t
1155 vn_nary_op_compute_hash (const vn_nary_op_t vno1) 1361 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1156 { 1362 {
1157 hashval_t hash = 0; 1363 hashval_t hash = 0;
1158 unsigned i; 1364 unsigned i;
1159 1365
1216 if it exists. */ 1422 if it exists. */
1217 1423
1218 tree 1424 tree
1219 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code, 1425 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1220 tree type, tree op0, tree op1, tree op2, 1426 tree type, tree op0, tree op1, tree op2,
1221 tree op3, vn_nary_op_t *vnresult) 1427 tree op3, vn_nary_op_t *vnresult)
1222 { 1428 {
1223 void **slot; 1429 void **slot;
1224 struct vn_nary_op_s vno1; 1430 struct vn_nary_op_s vno1;
1225 if (vnresult) 1431 if (vnresult)
1226 *vnresult = NULL; 1432 *vnresult = NULL;
1320 vn_nary_op_t 1526 vn_nary_op_t
1321 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, 1527 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1322 tree type, tree op0, 1528 tree type, tree op0,
1323 tree op1, tree op2, tree op3, 1529 tree op1, tree op2, tree op3,
1324 tree result, 1530 tree result,
1325 unsigned int value_id) 1531 unsigned int value_id)
1326 { 1532 {
1327 void **slot; 1533 void **slot;
1328 vn_nary_op_t vno1; 1534 vn_nary_op_t vno1;
1329 1535
1330 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack, 1536 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1348 INSERT); 1554 INSERT);
1349 gcc_assert (!*slot); 1555 gcc_assert (!*slot);
1350 1556
1351 *slot = vno1; 1557 *slot = vno1;
1352 return vno1; 1558 return vno1;
1353 1559
1354 } 1560 }
1355 1561
1356 /* Insert OP into the current hash table with a value number of 1562 /* Insert OP into the current hash table with a value number of
1357 RESULT. Return the vn_nary_op_t structure we created and put in 1563 RESULT. Return the vn_nary_op_t structure we created and put in
1358 the hashtable. */ 1564 the hashtable. */
1609 1815
1610 currval = SSA_VAL (from); 1816 currval = SSA_VAL (from);
1611 1817
1612 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME)) 1818 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1613 { 1819 {
1614 SSA_VAL (from) = to; 1820 VN_INFO (from)->valnum = to;
1615 if (dump_file && (dump_flags & TDF_DETAILS)) 1821 if (dump_file && (dump_flags & TDF_DETAILS))
1616 fprintf (dump_file, " (changed)\n"); 1822 fprintf (dump_file, " (changed)\n");
1617 return true; 1823 return true;
1618 } 1824 }
1619 if (dump_file && (dump_flags & TDF_DETAILS)) 1825 if (dump_file && (dump_flags & TDF_DETAILS))
1717 visit_reference_op_call (tree lhs, gimple stmt) 1923 visit_reference_op_call (tree lhs, gimple stmt)
1718 { 1924 {
1719 bool changed = false; 1925 bool changed = false;
1720 struct vn_reference_s vr1; 1926 struct vn_reference_s vr1;
1721 tree result; 1927 tree result;
1722 1928 tree vuse = gimple_vuse (stmt);
1723 vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt)); 1929
1724 vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt)); 1930 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1931 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1932 vr1.type = gimple_expr_type (stmt);
1933 vr1.set = 0;
1725 vr1.hashcode = vn_reference_compute_hash (&vr1); 1934 vr1.hashcode = vn_reference_compute_hash (&vr1);
1726 result = vn_reference_lookup_1 (&vr1, NULL); 1935 result = vn_reference_lookup_1 (&vr1, NULL);
1727 if (result) 1936 if (result)
1728 { 1937 {
1729 changed = set_ssa_val_to (lhs, result); 1938 changed = set_ssa_val_to (lhs, result);
1735 { 1944 {
1736 void **slot; 1945 void **slot;
1737 vn_reference_t vr2; 1946 vn_reference_t vr2;
1738 changed = set_ssa_val_to (lhs, lhs); 1947 changed = set_ssa_val_to (lhs, lhs);
1739 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool); 1948 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1740 vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt)); 1949 vr2->vuse = vr1.vuse;
1741 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt)); 1950 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1951 vr2->type = vr1.type;
1952 vr2->set = vr1.set;
1742 vr2->hashcode = vr1.hashcode; 1953 vr2->hashcode = vr1.hashcode;
1743 vr2->result = lhs; 1954 vr2->result = lhs;
1744 slot = htab_find_slot_with_hash (current_info->references, 1955 slot = htab_find_slot_with_hash (current_info->references,
1745 vr2, vr2->hashcode, INSERT); 1956 vr2, vr2->hashcode, INSERT);
1746 if (*slot) 1957 if (*slot)
1756 1967
1757 static bool 1968 static bool
1758 visit_reference_op_load (tree lhs, tree op, gimple stmt) 1969 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1759 { 1970 {
1760 bool changed = false; 1971 bool changed = false;
1761 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true, 1972 tree last_vuse;
1762 NULL); 1973 tree result;
1974
1975 last_vuse = gimple_vuse (stmt);
1976 last_vuse_ptr = &last_vuse;
1977 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1978 last_vuse_ptr = NULL;
1979
1980 /* If we have a VCE, try looking up its operand as it might be stored in
1981 a different type. */
1982 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
1983 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
1984 true, NULL);
1763 1985
1764 /* We handle type-punning through unions by value-numbering based 1986 /* We handle type-punning through unions by value-numbering based
1765 on offset and size of the access. Be prepared to handle a 1987 on offset and size of the access. Be prepared to handle a
1766 type-mismatch here via creating a VIEW_CONVERT_EXPR. */ 1988 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1767 if (result 1989 if (result
1835 } 2057 }
1836 } 2058 }
1837 else 2059 else
1838 { 2060 {
1839 changed = set_ssa_val_to (lhs, lhs); 2061 changed = set_ssa_val_to (lhs, lhs);
1840 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt)); 2062 vn_reference_insert (op, lhs, last_vuse);
1841 } 2063 }
1842 2064
1843 return changed; 2065 return changed;
1844 } 2066 }
1845 2067
1868 this store. 2090 this store.
1869 2091
1870 Otherwise, the vdefs for the store are used when inserting into 2092 Otherwise, the vdefs for the store are used when inserting into
1871 the table, since the store generates a new memory state. */ 2093 the table, since the store generates a new memory state. */
1872 2094
1873 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false, 2095 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
1874 NULL);
1875 2096
1876 if (result) 2097 if (result)
1877 { 2098 {
1878 if (TREE_CODE (result) == SSA_NAME) 2099 if (TREE_CODE (result) == SSA_NAME)
1879 result = SSA_VAL (result); 2100 result = SSA_VAL (result);
1882 resultsame = expressions_equal_p (result, op); 2103 resultsame = expressions_equal_p (result, op);
1883 } 2104 }
1884 2105
1885 if (!result || !resultsame) 2106 if (!result || !resultsame)
1886 { 2107 {
1887 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1888 int i;
1889 tree vdef; 2108 tree vdef;
1890 2109
1891 if (dump_file && (dump_flags & TDF_DETAILS)) 2110 if (dump_file && (dump_flags & TDF_DETAILS))
1892 { 2111 {
1893 fprintf (dump_file, "No store match\n"); 2112 fprintf (dump_file, "No store match\n");
1897 print_generic_expr (dump_file, op, 0); 2116 print_generic_expr (dump_file, op, 0);
1898 fprintf (dump_file, "\n"); 2117 fprintf (dump_file, "\n");
1899 } 2118 }
1900 /* Have to set value numbers before insert, since insert is 2119 /* Have to set value numbers before insert, since insert is
1901 going to valueize the references in-place. */ 2120 going to valueize the references in-place. */
1902 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++) 2121 if ((vdef = gimple_vdef (stmt)))
1903 { 2122 {
1904 VN_INFO (vdef)->use_processed = true; 2123 VN_INFO (vdef)->use_processed = true;
1905 changed |= set_ssa_val_to (vdef, vdef); 2124 changed |= set_ssa_val_to (vdef, vdef);
1906 } 2125 }
1907 2126
1908 /* Do not insert structure copies into the tables. */ 2127 /* Do not insert structure copies into the tables. */
1909 if (is_gimple_min_invariant (op) 2128 if (is_gimple_min_invariant (op)
1910 || is_gimple_reg (op)) 2129 || is_gimple_reg (op))
1911 vn_reference_insert (lhs, op, vdefs); 2130 vn_reference_insert (lhs, op, vdef);
1912 } 2131 }
1913 else 2132 else
1914 { 2133 {
1915 /* We had a match, so value number the vdefs to have the value 2134 /* We had a match, so value number the vdef to have the value
1916 number of the vuses they came from. */ 2135 number of the vuse it came from. */
1917 ssa_op_iter op_iter; 2136 tree def, use;
1918 def_operand_p var;
1919 vuse_vec_p vv;
1920 2137
1921 if (dump_file && (dump_flags & TDF_DETAILS)) 2138 if (dump_file && (dump_flags & TDF_DETAILS))
1922 fprintf (dump_file, "Store matched earlier value, " 2139 fprintf (dump_file, "Store matched earlier value, "
1923 "value numbering store vdefs to matching vuses.\n"); 2140 "value numbering store vdefs to matching vuses.\n");
1924 2141
1925 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter) 2142 def = gimple_vdef (stmt);
1926 { 2143 use = gimple_vuse (stmt);
1927 tree def = DEF_FROM_PTR (var); 2144
1928 tree use; 2145 VN_INFO (def)->use_processed = true;
1929 2146 changed |= set_ssa_val_to (def, SSA_VAL (use));
1930 /* Uh, if the vuse is a multiuse, we can't really do much
1931 here, sadly, since we don't know which value number of
1932 which vuse to use. */
1933 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1934 use = def;
1935 else
1936 use = VUSE_ELEMENT_VAR (*vv, 0);
1937
1938 VN_INFO (def)->use_processed = true;
1939 changed |= set_ssa_val_to (def, SSA_VAL (use));
1940 }
1941 } 2147 }
1942 2148
1943 return changed; 2149 return changed;
1944 } 2150 }
1945 2151
2496 gimple opstmtb = SSA_NAME_DEF_STMT (opb); 2702 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2497 basic_block bba; 2703 basic_block bba;
2498 basic_block bbb; 2704 basic_block bbb;
2499 2705
2500 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb)) 2706 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2501 return 0; 2707 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2502 else if (gimple_nop_p (opstmta)) 2708 else if (gimple_nop_p (opstmta))
2503 return -1; 2709 return -1;
2504 else if (gimple_nop_p (opstmtb)) 2710 else if (gimple_nop_p (opstmtb))
2505 return 1; 2711 return 1;
2506 2712
2507 bba = gimple_bb (opstmta); 2713 bba = gimple_bb (opstmta);
2508 bbb = gimple_bb (opstmtb); 2714 bbb = gimple_bb (opstmtb);
2509 2715
2510 if (!bba && !bbb) 2716 if (!bba && !bbb)
2511 return 0; 2717 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2512 else if (!bba) 2718 else if (!bba)
2513 return -1; 2719 return -1;
2514 else if (!bbb) 2720 else if (!bbb)
2515 return 1; 2721 return 1;
2516 2722
2517 if (bba == bbb) 2723 if (bba == bbb)
2518 { 2724 {
2519 if (gimple_code (opstmta) == GIMPLE_PHI 2725 if (gimple_code (opstmta) == GIMPLE_PHI
2520 && gimple_code (opstmtb) == GIMPLE_PHI) 2726 && gimple_code (opstmtb) == GIMPLE_PHI)
2521 return 0; 2727 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2522 else if (gimple_code (opstmta) == GIMPLE_PHI) 2728 else if (gimple_code (opstmta) == GIMPLE_PHI)
2523 return -1; 2729 return -1;
2524 else if (gimple_code (opstmtb) == GIMPLE_PHI) 2730 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2525 return 1; 2731 return 1;
2526 return gimple_uid (opstmta) - gimple_uid (opstmtb); 2732 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2733 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2734 else
2735 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2527 } 2736 }
2528 return rpo_numbers[bba->index] - rpo_numbers[bbb->index]; 2737 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2529 } 2738 }
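The new tie-breaks on SSA_NAME_VERSION turn compare_ops into a total order: returning 0 for distinct names would let qsort, which is not stable, produce different permutations on different hosts. A hedged sketch of the pattern, where the struct fields are hypothetical stand-ins for the RPO number and SSA_NAME_VERSION:

#include <stdio.h>
#include <stdlib.h>

struct op { int rpo; int version; };       /* hypothetical SSA-name proxy */

static int cmp_total (const void *pa, const void *pb)
{
  const struct op *a = (const struct op *) pa;
  const struct op *b = (const struct op *) pb;
  if (a->rpo != b->rpo)
    return a->rpo - b->rpo;                /* primary key, like rpo_numbers */
  return a->version - b->version;          /* unique tie-break: total order */
}

int main (void)
{
  struct op v[] = { { 1, 7 }, { 0, 9 }, { 1, 2 } };
  qsort (v, 3, sizeof v[0], cmp_total);
  for (int i = 0; i < 3; i++)
    printf ("(%d,%d) ", v[i].rpo, v[i].version);  /* (0,9) (1,2) (1,7) */
  printf ("\n");
  return 0;
}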
2530 2739
2531 /* Sort an array containing members of a strongly connected component 2740 /* Sort an array containing members of a strongly connected component
2538 { 2747 {
2539 qsort (VEC_address (tree, scc), 2748 qsort (VEC_address (tree, scc),
2540 VEC_length (tree, scc), 2749 VEC_length (tree, scc),
2541 sizeof (tree), 2750 sizeof (tree),
2542 compare_ops); 2751 compare_ops);
2752 }
2753
2754 /* Insert the no-longer-used nary *ENTRY into the current hash table. */
2755
2756 static int
2757 copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
2758 {
2759 vn_nary_op_t onary = (vn_nary_op_t) *entry;
2760 size_t size = (sizeof (struct vn_nary_op_s)
2761 - sizeof (tree) * (4 - onary->length));
2762 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
2763 size);
2764 void **slot;
2765 memcpy (nary, onary, size);
2766 slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
2767 INSERT);
2768 gcc_assert (!*slot);
2769 *slot = nary;
2770 return 1;
2771 }
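copy_nary allocates each clone truncated to its actual operand count: vn_nary_op_s reserves room for four operands, and the subtraction drops the unused tail. A sketch of the idiom under a hypothetical mirror of the struct:

#include <stdlib.h>
#include <string.h>

typedef void *tree;                        /* stand-in for GCC's tree */

struct nary { unsigned length; tree op[4]; };   /* hypothetical mirror */

static struct nary *clone_truncated (const struct nary *onary)
{
  /* Same arithmetic as copy_nary: keep only `length` operand slots.  */
  size_t size = sizeof (struct nary) - sizeof (tree) * (4 - onary->length);
  struct nary *n = (struct nary *) malloc (size);  /* obstack in the real code */
  if (n)
    memcpy (n, onary, size);               /* copies only the live operands */
  return n;
}

int main (void)
{
  struct nary unary = { 1, { (tree) "op0" } };
  free (clone_truncated (&unary));
  return 0;
}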
2772
2773 /* Insert the no-longer-used phi *ENTRY into the current hash table. */
2774
2775 static int
2776 copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
2777 {
2778 vn_phi_t ophi = (vn_phi_t) *entry;
2779 vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
2780 void **slot;
2781 memcpy (phi, ophi, sizeof (*phi));
2782 ophi->phiargs = NULL;
2783 slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
2784 INSERT);
2785 *slot = phi;
2786 return 1;
2787 }
2788
2789 /* Insert the no-longer-used reference *ENTRY into the current hash table. */
2790
2791 static int
2792 copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
2793 {
2794 vn_reference_t oref = (vn_reference_t) *entry;
2795 vn_reference_t ref;
2796 void **slot;
2797 ref = (vn_reference_t) pool_alloc (current_info->references_pool);
2798 memcpy (ref, oref, sizeof (*ref));
2799 oref->operands = NULL;
2800 slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
2801 INSERT);
2802 if (*slot)
2803 free_reference (*slot);
2804 *slot = ref;
2805 return 1;
2543 } 2806 }
2544 2807
2545 /* Process a strongly connected component in the SSA graph. */ 2808 /* Process a strongly connected component in the SSA graph. */
2546 2809
2547 static void 2810 static void
2585 changed |= visit_use (var); 2848 changed |= visit_use (var);
2586 } 2849 }
2587 2850
2588 statistics_histogram_event (cfun, "SCC iterations", iterations); 2851 statistics_histogram_event (cfun, "SCC iterations", iterations);
2589 2852
2590 /* Finally, visit the SCC once using the valid table. */ 2853 /* Finally, copy the contents of the no longer used optimistic
2854 table to the valid table. */
2591 current_info = valid_info; 2855 current_info = valid_info;
2592 for (i = 0; VEC_iterate (tree, scc, i, var); i++) 2856 htab_traverse (optimistic_info->nary, copy_nary, NULL);
2593 visit_use (var); 2857 htab_traverse (optimistic_info->phis, copy_phis, NULL);
2858 htab_traverse (optimistic_info->references, copy_references, NULL);
2594 } 2859 }
2595 } 2860 }
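Instead of re-visiting the SCC with the valid table, the new code commits the stabilized optimistic tables wholesale with htab_traverse. A hedged sketch of that traverse-and-reinsert pattern against libiberty's hashtab.h, which this file already uses; entry_hash is a hypothetical accessor for the entry's cached hashcode:

#include "hashtab.h"                       /* libiberty hash table API */

extern hashval_t entry_hash (const void *entry);   /* hypothetical */

/* Callback in the style of copy_nary/copy_phis/copy_references: move
   one entry of the traversed table into DST, return 1 to continue.  */
static int move_entry (void **slot, void *data)
{
  htab_t dst = (htab_t) data;
  void **dslot = htab_find_slot_with_hash (dst, *slot,
                                           entry_hash (*slot), INSERT);
  *dslot = *slot;                          /* shallow move, like copy_phis */
  return 1;
}

/* Usage: htab_traverse (optimistic_table, move_entry, valid_table);  */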
2596 2861
2597 DEF_VEC_O(ssa_op_iter); 2862 DEF_VEC_O(ssa_op_iter);
2598 DEF_VEC_ALLOC_O(ssa_op_iter,heap); 2863 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2784 3049
2785 calculate_dominance_info (CDI_DOMINATORS); 3050 calculate_dominance_info (CDI_DOMINATORS);
2786 sccstack = NULL; 3051 sccstack = NULL;
2787 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq, 3052 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2788 free); 3053 free);
2789 3054
2790 constant_value_ids = BITMAP_ALLOC (NULL); 3055 constant_value_ids = BITMAP_ALLOC (NULL);
2791 3056
2792 next_dfs_num = 1; 3057 next_dfs_num = 1;
2793 next_value_id = 1; 3058 next_value_id = 1;
2794 3059
2795 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1); 3060 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2796 /* VEC_alloc doesn't actually grow it to the right size; it just 3061 /* VEC_alloc doesn't actually grow it to the right size; it just
2797 preallocates the space to do so. */ 3062 preallocates the space to do so. */
2798 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1); 3063 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2799 gcc_obstack_init (&vn_ssa_aux_obstack); 3064 gcc_obstack_init (&vn_ssa_aux_obstack);
2800 3065
2801 shared_lookup_phiargs = NULL; 3066 shared_lookup_phiargs = NULL;
2802 shared_lookup_vops = NULL;
2803 shared_lookup_references = NULL; 3067 shared_lookup_references = NULL;
2804 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); 3068 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2805 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); 3069 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2806 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false); 3070 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2807 3071
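constant_to_value_id above is a libiberty hash table. A minimal, self-contained sketch of the create/lookup/insert/delete cycle with that API, using a string key instead of a vn_constant_s for brevity:

#include <stdio.h>
#include <string.h>
#include "hashtab.h"                       /* libiberty; link with -liberty */

static hashval_t str_hash (const void *p)
{
  return htab_hash_string (p);
}

static int str_eq (const void *a, const void *b)
{
  return strcmp ((const char *) a, (const char *) b) == 0;
}

int main (void)
{
  htab_t tab = htab_create (23, str_hash, str_eq, NULL);  /* 23: size hint */
  void **slot = htab_find_slot (tab, "forty-two", INSERT);
  if (!*slot)
    *slot = (void *) "forty-two";          /* first insertion wins */
  printf ("found: %s\n", (const char *) htab_find (tab, "forty-two"));
  htab_delete (tab);
  return 0;
}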
2843 size_t i; 3107 size_t i;
2844 3108
2845 htab_delete (constant_to_value_id); 3109 htab_delete (constant_to_value_id);
2846 BITMAP_FREE (constant_value_ids); 3110 BITMAP_FREE (constant_value_ids);
2847 VEC_free (tree, heap, shared_lookup_phiargs); 3111 VEC_free (tree, heap, shared_lookup_phiargs);
2848 VEC_free (tree, gc, shared_lookup_vops);
2849 VEC_free (vn_reference_op_s, heap, shared_lookup_references); 3112 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2850 XDELETEVEC (rpo_numbers); 3113 XDELETEVEC (rpo_numbers);
2851 3114
2852 for (i = 0; i < num_ssa_names; i++) 3115 for (i = 0; i < num_ssa_names; i++)
2853 { 3116 {
2878 3141
2879 /* Now set the value ids of the things we had put in the hash 3142 /* Now set the value ids of the things we had put in the hash
2880 table. */ 3143 table. */
2881 3144
2882 FOR_EACH_HTAB_ELEMENT (valid_info->nary, 3145 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2883 vno, vn_nary_op_t, hi) 3146 vno, vn_nary_op_t, hi)
2884 { 3147 {
2885 if (vno->result) 3148 if (vno->result)
2886 { 3149 {
2887 if (TREE_CODE (vno->result) == SSA_NAME) 3150 if (TREE_CODE (vno->result) == SSA_NAME)
2888 vno->value_id = VN_INFO (vno->result)->value_id; 3151 vno->value_id = VN_INFO (vno->result)->value_id;
2889 else if (is_gimple_min_invariant (vno->result)) 3152 else if (is_gimple_min_invariant (vno->result))
2890 vno->value_id = get_or_alloc_constant_value_id (vno->result); 3153 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2891 } 3154 }
2892 } 3155 }
2893 3156
2894 FOR_EACH_HTAB_ELEMENT (valid_info->phis, 3157 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2895 vp, vn_phi_t, hi) 3158 vp, vn_phi_t, hi)
2896 { 3159 {
2897 if (vp->result) 3160 if (vp->result)
2898 { 3161 {
2899 if (TREE_CODE (vp->result) == SSA_NAME) 3162 if (TREE_CODE (vp->result) == SSA_NAME)
2900 vp->value_id = VN_INFO (vp->result)->value_id; 3163 vp->value_id = VN_INFO (vp->result)->value_id;
2901 else if (is_gimple_min_invariant (vp->result)) 3164 else if (is_gimple_min_invariant (vp->result))
2902 vp->value_id = get_or_alloc_constant_value_id (vp->result); 3165 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2903 } 3166 }
2904 } 3167 }
2905 3168
2906 FOR_EACH_HTAB_ELEMENT (valid_info->references, 3169 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2907 vr, vn_reference_t, hi) 3170 vr, vn_reference_t, hi)
2908 { 3171 {
2909 if (vr->result) 3172 if (vr->result)
2910 { 3173 {
2911 if (TREE_CODE (vr->result) == SSA_NAME) 3174 if (TREE_CODE (vr->result) == SSA_NAME)
2912 vr->value_id = VN_INFO (vr->result)->value_id; 3175 vr->value_id = VN_INFO (vr->result)->value_id;
2923 run_scc_vn (bool may_insert_arg) 3186 run_scc_vn (bool may_insert_arg)
2924 { 3187 {
2925 size_t i; 3188 size_t i;
2926 tree param; 3189 tree param;
2927 bool changed = true; 3190 bool changed = true;
2928 3191
2929 may_insert = may_insert_arg; 3192 may_insert = may_insert_arg;
2930 3193
2931 init_scc_vn (); 3194 init_scc_vn ();
2932 current_info = valid_info; 3195 current_info = valid_info;
2933 3196
2936 param = TREE_CHAIN (param)) 3199 param = TREE_CHAIN (param))
2937 { 3200 {
2938 if (gimple_default_def (cfun, param) != NULL) 3201 if (gimple_default_def (cfun, param) != NULL)
2939 { 3202 {
2940 tree def = gimple_default_def (cfun, param); 3203 tree def = gimple_default_def (cfun, param);
2941 SSA_VAL (def) = def; 3204 VN_INFO (def)->valnum = def;
2942 } 3205 }
2943 } 3206 }
2944 3207
2945 for (i = 1; i < num_ssa_names; ++i) 3208 for (i = 1; i < num_ssa_names; ++i)
2946 { 3209 {
2955 return false; 3218 return false;
2956 } 3219 }
2957 } 3220 }
2958 3221
2959 /* Initialize the value ids. */ 3222 /* Initialize the value ids. */
2960 3223
2961 for (i = 1; i < num_ssa_names; ++i) 3224 for (i = 1; i < num_ssa_names; ++i)
2962 { 3225 {
2963 tree name = ssa_name (i); 3226 tree name = ssa_name (i);
2964 vn_ssa_aux_t info; 3227 vn_ssa_aux_t info;
2965 if (!name) 3228 if (!name)
2966 continue; 3229 continue;
2967 info = VN_INFO (name); 3230 info = VN_INFO (name);
2968 if (info->valnum == name) 3231 if (info->valnum == name
3232 || info->valnum == VN_TOP)
2969 info->value_id = get_next_value_id (); 3233 info->value_id = get_next_value_id ();
2970 else if (is_gimple_min_invariant (info->valnum)) 3234 else if (is_gimple_min_invariant (info->valnum))
2971 info->value_id = get_or_alloc_constant_value_id (info->valnum); 3235 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2972 } 3236 }
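Names that value to themselves (or to VN_TOP) get fresh ids, while names that value to a constant share one id per distinct constant, so equal constants compare equal by id alone. A sketch of that sharing with a linear pool standing in for the constant_to_value_id hash table (names hypothetical):

#include <stdio.h>

static unsigned next_id = 1;               /* like next_value_id */
static int pool[16];                       /* constants seen so far */
static unsigned ids[16], n_pool;

static unsigned constant_id (int c)
{
  for (unsigned i = 0; i < n_pool; i++)
    if (pool[i] == c)
      return ids[i];                       /* reuse the shared id */
  pool[n_pool] = c;
  return ids[n_pool++] = next_id++;        /* fresh id for a new constant */
}

int main (void)
{
  /* Two names valuing to 42 receive the same id.  */
  printf ("%u %u %u\n", constant_id (42), constant_id (7), constant_id (42));
  return 0;
}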
2973 3237
2974 /* Propagate until they stop changing. */ 3238 /* Propagate until they stop changing. */
2975 while (changed) 3239 while (changed)
2976 { 3240 {
2977 changed = false; 3241 changed = false;
2978 for (i = 1; i < num_ssa_names; ++i) 3242 for (i = 1; i < num_ssa_names; ++i)
2989 changed = true; 3253 changed = true;
2990 info->value_id = VN_INFO (info->valnum)->value_id; 3254 info->value_id = VN_INFO (info->valnum)->value_id;
2991 } 3255 }
2992 } 3256 }
2993 } 3257 }
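The loop above re-sweeps all names until no value id changes: a plain fixed-point iteration over the valnum chains. A toy version with a hypothetical chain of four names:

#include <stdbool.h>
#include <stdio.h>

int main (void)
{
  /* valnum[i]: the name i values to; id[i]: its value id.  Name 1
     values to itself with id 42; names 2 and 3 chain to it.  */
  int valnum[4] = { 0, 1, 1, 2 };
  unsigned id[4] = { 0, 42, 0, 0 };
  bool changed = true;
  int sweeps = 0;
  while (changed)                          /* mirrors the propagation loop */
    {
      changed = false;
      sweeps++;
      for (int i = 1; i < 4; i++)
        if (id[i] != id[valnum[i]])
          {
            id[i] = id[valnum[i]];         /* copy the id down the chain */
            changed = true;
          }
    }
  printf ("stable after %d sweeps: %u %u %u\n", sweeps, id[1], id[2], id[3]);
  return 0;
}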
2994 3258
2995 set_hashtable_value_ids (); 3259 set_hashtable_value_ids ();
2996 3260
2997 if (dump_file && (dump_flags & TDF_DETAILS)) 3261 if (dump_file && (dump_flags & TDF_DETAILS))
2998 { 3262 {
2999 fprintf (dump_file, "Value numbers:\n"); 3263 fprintf (dump_file, "Value numbers:\n");
3000 for (i = 0; i < num_ssa_names; i++) 3264 for (i = 0; i < num_ssa_names; i++)
3001 { 3265 {
3017 } 3281 }
3018 3282
3019 /* Return the maximum value id we have ever seen. */ 3283 /* Return the maximum value id we have ever seen. */
3020 3284
3021 unsigned int 3285 unsigned int
3022 get_max_value_id (void) 3286 get_max_value_id (void)
3023 { 3287 {
3024 return next_value_id; 3288 return next_value_id;
3025 } 3289 }
3026 3290
3027 /* Return the next unique value id. */ 3291 /* Return the next unique value id. */
3069 return true; 3333 return true;
3070 3334
3071 return false; 3335 return false;
3072 } 3336 }
3073 3337
3074 /* Sort the VUSE array so that equality comparisons on two
3075 vuse vecs are quicker. */
3076
3077 void
3078 sort_vuses (VEC (tree,gc) *vuses)
3079 {
3080 if (VEC_length (tree, vuses) > 1)
3081 qsort (VEC_address (tree, vuses),
3082 VEC_length (tree, vuses),
3083 sizeof (tree),
3084 operand_build_cmp);
3085 }
3086
3087 /* Sort the VUSE array so that equality comparisons on two
3088 vuse vecs are quicker. */
3089
3090 void
3091 sort_vuses_heap (VEC (tree,heap) *vuses)
3092 {
3093 if (VEC_length (tree, vuses) > 1)
3094 qsort (VEC_address (tree, vuses),
3095 VEC_length (tree, vuses),
3096 sizeof (tree),
3097 operand_build_cmp);
3098 }
3099
3100 3338
3101 /* Return true if the nary operation NARY may trap. This is a copy 3339 /* Return true if the nary operation NARY may trap. This is a copy
3102 of stmt_could_throw_1_p adjusted to the SCCVN IL. */ 3340 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3103 3341
3104 bool 3342 bool