diff gcc/recog.c @ 55:77e2b8dfacca gcc-4.4.5

update gcc from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
--- a/gcc/recog.c	Sun Feb 07 18:28:00 2010 +0900
+++ b/gcc/recog.c	Fri Feb 12 23:39:51 2010 +0900
@@ -286,7 +286,7 @@
   else
     return false;
 }
-  
+
 
 /* This subroutine of apply_change_group verifies whether the changes to INSN
    were valid; i.e. whether INSN can still be recognized.  */
@@ -376,7 +376,9 @@
 
       if (MEM_P (object))
 	{
-	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
+	  if (! memory_address_addr_space_p (GET_MODE (object),
+					     XEXP (object, 0),
+					     MEM_ADDR_SPACE (object)))
 	    break;
 	}
       else if (REG_P (changes[i].old)
@@ -389,6 +391,8 @@
 	     assemblies if they have been defined as register asm ("x").  */
 	  break;
 	}
+      else if (DEBUG_INSN_P (object))
+	continue;
       else if (insn_invalid_p (object))
 	{
 	  rtx pat = PATTERN (object);
@@ -429,7 +433,8 @@
 	      validate_change (object, &PATTERN (object), newpat, 1);
 	      continue;
 	    }
-	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+		   || GET_CODE (pat) == VAR_LOCATION)
 	    /* If this insn is a CLOBBER or USE, it is always valid, but is
 	       never recognized.  */
 	    continue;
@@ -524,7 +529,7 @@
    rtx.  */
 
 static void
-simplify_while_replacing (rtx *loc, rtx to, rtx object, 
+simplify_while_replacing (rtx *loc, rtx to, rtx object,
                           enum machine_mode op0_mode)
 {
   rtx x = *loc;
@@ -550,13 +555,13 @@
          simplify_gen_binary to try to simplify it.
          ??? We may want later to remove this, once simplification is
          separated from this function.  */
-      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
+      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
 	validate_change (object, loc,
 			 simplify_gen_binary
 			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
       break;
     case MINUS:
-      if (GET_CODE (XEXP (x, 1)) == CONST_INT
+      if (CONST_INT_P (XEXP (x, 1))
 	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
 	validate_change (object, loc,
 			 simplify_gen_binary
@@ -597,8 +602,8 @@
          happen, we might just fail in some cases).  */
 
       if (MEM_P (XEXP (x, 0))
-	  && GET_CODE (XEXP (x, 1)) == CONST_INT
-	  && GET_CODE (XEXP (x, 2)) == CONST_INT
+	  && CONST_INT_P (XEXP (x, 1))
+	  && CONST_INT_P (XEXP (x, 2))
 	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
 	  && !MEM_VOLATILE_P (XEXP (x, 0)))
 	{
@@ -655,7 +660,7 @@
    validate_change passing OBJECT.  */
 
 static void
-validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, 
+validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                         bool simplify)
 {
   int i, j;
@@ -708,7 +713,7 @@
 				      from, to, object, simplify);
 	    }
 	  else
-	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, 
+	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                     simplify);
 	}
     }
@@ -719,7 +724,7 @@
 	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
 	else if (fmt[i] == 'E')
 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
-	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, 
+	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                     simplify);
       }
 
@@ -738,6 +743,17 @@
     simplify_while_replacing (loc, to, object, op0_mode);
 }
 
+/* Try replacing every occurrence of FROM in subexpression LOC of INSN
+   with TO.  After all changes have been made, validate by seeing
+   if INSN is still valid.  */
+
+int
+validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
+{
+  validate_replace_rtx_1 (loc, from, to, insn, true);
+  return apply_change_group ();
+}
+
 /* Try replacing every occurrence of FROM in INSN with TO.  After all
    changes have been made, validate by seeing if INSN is still valid.  */
 
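/* Illustrative sketch, not part of the upstream patch: the
   validate_replace_rtx_subexp entry point added above validates a
   replacement restricted to one subexpression of INSN.  The caller below
   is hypothetical; single_set and SET_SRC are existing GCC facilities.  */

static int
replace_in_set_src (rtx from, rtx to, rtx insn)
{
  rtx set = single_set (insn);

  /* Replace FROM with TO only inside the SET_SRC of a single-set insn,
     then let apply_change_group decide whether INSN is still valid.  */
  return set != NULL_RTX
	 && validate_replace_rtx_subexp (from, to, insn, &SET_SRC (set));
}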
@@ -749,9 +765,9 @@
 }
 
 /* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
-   is a part of INSN.  After all changes have been made, validate by seeing if 
-   INSN is still valid.  
-   validate_replace_rtx (from, to, insn) is equivalent to 
+   is a part of INSN.  After all changes have been made, validate by seeing if
+   INSN is still valid.
+   validate_replace_rtx (from, to, insn) is equivalent to
    validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
 
 int
@@ -762,8 +778,8 @@
 }
 
 /* Same as above, but do not simplify rtx afterwards.  */
-int 
-validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, 
+int
+validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                       rtx insn)
 {
   validate_replace_rtx_1 (where, from, to, insn, false);
@@ -901,7 +917,7 @@
       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
     return 0;
 
-  if (GET_CODE (op) == CONST_INT
+  if (CONST_INT_P (op)
       && mode != VOIDmode
       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
     return 0;
@@ -964,7 +980,7 @@
 	return 0;
 
       /* Use the mem's mode, since it will be reloaded thus.  */
-      if (memory_address_p (GET_MODE (op), y))
+      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
 	return 1;
     }
 
@@ -1078,7 +1094,7 @@
       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
     return 0;
 
-  if (GET_CODE (op) == CONST_INT
+  if (CONST_INT_P (op)
       && mode != VOIDmode
       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
     return 0;
@@ -1095,7 +1111,7 @@
 int
 const_int_operand (rtx op, enum machine_mode mode)
 {
-  if (GET_CODE (op) != CONST_INT)
+  if (!CONST_INT_P (op))
     return 0;
 
   if (mode != VOIDmode
@@ -1118,7 +1134,7 @@
       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
     return 0;
 
-  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
+  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
 	  && (mode == VOIDmode || GET_MODE (op) == mode
 	      || GET_MODE (op) == VOIDmode));
 }
@@ -1145,7 +1161,7 @@
 	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
 	return 0;
 
-      if (GET_CODE (op) == CONST_INT
+      if (CONST_INT_P (op)
 	  && mode != VOIDmode
 	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
 	return 0;
@@ -1212,7 +1228,7 @@
       if (GET_CODE (op) != PRE_MODIFY
 	  || GET_CODE (XEXP (op, 1)) != PLUS
 	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
-	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
+	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
 #ifdef STACK_GROWS_DOWNWARD
 	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
 #else
@@ -1248,16 +1264,23 @@
   return XEXP (op, 0) == stack_pointer_rtx;
 }
 
-/* Return 1 if ADDR is a valid memory address for mode MODE.  */
+/* Return 1 if ADDR is a valid memory address
+   for mode MODE in address space AS.  */
 
 int
-memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
+memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+			     rtx addr, addr_space_t as)
 {
+#ifdef GO_IF_LEGITIMATE_ADDRESS
+  gcc_assert (ADDR_SPACE_GENERIC_P (as));
   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
   return 0;
 
  win:
   return 1;
+#else
+  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
+#endif
 }
 
 /* Return 1 if OP is a valid memory reference with mode MODE,
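/* Illustrative sketch, not part of the upstream patch: callers that used to
   test memory_address_p on a MEM's address now pass the MEM's own address
   space as well, typically as in the hypothetical helper below.  */

static int
mem_address_ok_p (rtx op)
{
  /* Validate the address of OP in the address space OP was declared in.  */
  return MEM_P (op)
	 && memory_address_addr_space_p (GET_MODE (op), XEXP (op, 0),
					 MEM_ADDR_SPACE (op));
}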
@@ -1309,7 +1332,7 @@
 
       return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
 	      || (GET_CODE (XEXP (inner, 0)) == PLUS
-		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
+		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
 		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
 		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
     }
@@ -1319,6 +1342,32 @@
 	  && general_operand (XEXP (op, 0), Pmode));
 }
 
+/* Return 1 if this is an ordered comparison operator (not including
+   ORDERED and UNORDERED).  */
+
+int
+ordered_comparison_operator (rtx op, enum machine_mode mode)
+{
+  if (mode != VOIDmode && GET_MODE (op) != mode)
+    return false;
+  switch (GET_CODE (op))
+    {
+    case EQ:
+    case NE:
+    case LT:
+    case LTU:
+    case LE:
+    case LEU:
+    case GT:
+    case GTU:
+    case GE:
+    case GEU:
+      return true;
+    default:
+      return false;
+    }
+}
+
 /* Return 1 if this is a comparison operator.  This allows the use of
    MATCH_OPERATOR to recognize all the branch insns.  */
 
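/* Illustrative sketch, not part of the upstream patch: unlike
   comparison_operator below, the new ordered_comparison_operator predicate
   rejects the IEEE unordered codes.  X and Y are assumed to be any valid
   operands; gen_rtx_fmt_ee is the generic binary rtx constructor.  */

static int
ordered_predicate_example (rtx x, rtx y)
{
  rtx lt = gen_rtx_fmt_ee (LT, VOIDmode, x, y);	     /* accepted */
  rtx unlt = gen_rtx_fmt_ee (UNLT, VOIDmode, x, y);  /* rejected */

  return ordered_comparison_operator (lt, VOIDmode)
	 && !ordered_comparison_operator (unlt, VOIDmode);
}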
@@ -1329,6 +1378,42 @@
 	  && COMPARISON_P (op));
 }
 
+/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
+
+rtx
+extract_asm_operands (rtx body)
+{
+  rtx tmp;
+  switch (GET_CODE (body))
+    {
+    case ASM_OPERANDS:
+      return body;
+
+    case SET:
+      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
+      tmp = SET_SRC (body);
+      if (GET_CODE (tmp) == ASM_OPERANDS)
+	return tmp;
+      break;
+
+    case PARALLEL:
+      tmp = XVECEXP (body, 0, 0);
+      if (GET_CODE (tmp) == ASM_OPERANDS)
+	return tmp;
+      if (GET_CODE (tmp) == SET)
+	{
+	  tmp = SET_SRC (tmp);
+	  if (GET_CODE (tmp) == ASM_OPERANDS)
+	    return tmp;
+	}
+      break;
+
+    default:
+      break;
+    }
+  return NULL;
+}
+
 /* If BODY is an insn body that uses ASM_OPERANDS,
    return the number of operands (both input and output) in the insn.
    Otherwise return -1.  */
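/* Illustrative sketch, not part of the upstream patch: the new
   extract_asm_operands helper gives a single entry point for digging the
   ASM_OPERANDS out of any of the three asm body shapes, so a hypothetical
   caller no longer needs to switch on the body's code itself.  */

static int
asm_input_count (rtx insn)
{
  rtx asm_op = extract_asm_operands (PATTERN (insn));

  /* -1 means INSN is not an asm at all.  */
  return asm_op ? ASM_OPERANDS_INPUT_LENGTH (asm_op) : -1;
}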
@@ -1336,26 +1421,22 @@
 int
 asm_noperands (const_rtx body)
 {
-  switch (GET_CODE (body))
+  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
+  int n_sets = 0;
+
+  if (asm_op == NULL)
+    return -1;
+
+  if (GET_CODE (body) == SET)
+    n_sets = 1;
+  else if (GET_CODE (body) == PARALLEL)
     {
-    case ASM_OPERANDS:
-      /* No output operands: return number of input operands.  */
-      return ASM_OPERANDS_INPUT_LENGTH (body);
-    case SET:
-      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
-	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
-	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
-      else
-	return -1;
-    case PARALLEL:
-      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
-	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+      int i;
+      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
 	{
 	  /* Multiple output operands, or 1 output plus some clobbers:
-	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
-	  int i;
-	  int n_sets;
-
+	     body is
+	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
 	  /* Count backwards through CLOBBERs to determine number of SETs.  */
 	  for (i = XVECLEN (body, 0); i > 0; i--)
 	    {
@@ -1381,30 +1462,23 @@
 	      /* If these ASM_OPERANDS rtx's came from different original insns
 	         then they aren't allowed together.  */
 	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
-		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
+		  != ASM_OPERANDS_INPUT_VEC (asm_op))
 		return -1;
 	    }
-	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
-		  + n_sets);
 	}
-      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+      else
 	{
 	  /* 0 outputs, but some clobbers:
 	     body is [(asm_operands ...) (clobber (reg ...))...].  */
-	  int i;
-
 	  /* Make sure all the other parallel things really are clobbers.  */
 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
 	      return -1;
-
-	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
 	}
-      else
-	return -1;
-    default:
-      return -1;
     }
+
+  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
+	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
 }
 
 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
@@ -1422,28 +1496,19 @@
 		     const char **constraints, enum machine_mode *modes,
 		     location_t *loc)
 {
-  int i;
-  int noperands;
-  rtx asmop = 0;
-
-  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+  int nbase = 0, n, i;
+  rtx asmop;
+
+  switch (GET_CODE (body))
     {
+    case ASM_OPERANDS:
+      /* Zero output asm: BODY is (asm_operands ...).  */
+      asmop = body;
+      break;
+
+    case SET:
+      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
       asmop = SET_SRC (body);
-      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */
-
-      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
-
-      for (i = 1; i < noperands; i++)
-	{
-	  if (operand_locs)
-	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
-	  if (operands)
-	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
-	  if (constraints)
-	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
-	  if (modes)
-	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
-	}
 
       /* The output is in the SET.
 	 Its constraint is in the ASM_OPERANDS itself.  */
@@ -1455,93 +1520,67 @@
 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
       if (modes)
 	modes[0] = GET_MODE (SET_DEST (body));
+      nbase = 1;
+      break;
+
+    case PARALLEL:
+      {
+	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
+
+	asmop = XVECEXP (body, 0, 0);
+	if (GET_CODE (asmop) == SET)
+	  {
+	    asmop = SET_SRC (asmop);
+
+	    /* At least one output, plus some CLOBBERs.  The outputs are in
+	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
+	    for (i = 0; i < nparallel; i++)
+	      {
+		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
+		  break;		/* Past last SET */
+		if (operands)
+		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
+		if (operand_locs)
+		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
+		if (constraints)
+		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
+		if (modes)
+		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
+	      }
+	    nbase = i;
+	  }
+	break;
+      }
+
+    default:
+      gcc_unreachable ();
     }
-  else if (GET_CODE (body) == ASM_OPERANDS)
-    {
-      asmop = body;
-      /* No output operands: BODY is (asm_operands ....).  */
-
-      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
-
-      /* The input operands are found in the 1st element vector.  */
-      /* Constraints for inputs are in the 2nd element vector.  */
-      for (i = 0; i < noperands; i++)
-	{
-	  if (operand_locs)
-	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
-	  if (operands)
-	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
-	  if (constraints)
-	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
-	  if (modes)
-	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
-	}
-    }
-  else if (GET_CODE (body) == PARALLEL
-	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
-	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+
+  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
+  for (i = 0; i < n; i++)
     {
-      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
-      int nin;
-      int nout = 0;		/* Does not include CLOBBERs.  */
-
-      asmop = SET_SRC (XVECEXP (body, 0, 0));
-      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
-
-      /* At least one output, plus some CLOBBERs.  */
-
-      /* The outputs are in the SETs.
-	 Their constraints are in the ASM_OPERANDS itself.  */
-      for (i = 0; i < nparallel; i++)
-	{
-	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
-	    break;		/* Past last SET */
-
-	  if (operands)
-	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
-	  if (operand_locs)
-	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
-	  if (constraints)
-	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
-	  if (modes)
-	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
-	  nout++;
-	}
-
-      for (i = 0; i < nin; i++)
-	{
-	  if (operand_locs)
-	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
-	  if (operands)
-	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
-	  if (constraints)
-	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
-	  if (modes)
-	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
-	}
+      if (operand_locs)
+	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
+      if (operands)
+	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
+      if (constraints)
+	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
+      if (modes)
+	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
     }
-  else if (GET_CODE (body) == PARALLEL
-	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+  nbase += n;
+
+  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
+  for (i = 0; i < n; i++)
     {
-      /* No outputs, but some CLOBBERs.  */
-
-      int nin;
-
-      asmop = XVECEXP (body, 0, 0);
-      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
-
-      for (i = 0; i < nin; i++)
-	{
-	  if (operand_locs)
-	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
-	  if (operands)
-	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
-	  if (constraints)
-	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
-	  if (modes)
-	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
-	}
-
+      if (operand_locs)
+	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
+      if (operands)
+	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
+      if (constraints)
+	constraints[nbase + i] = "";
+      if (modes)
+	modes[nbase + i] = Pmode;
     }
 
   if (loc)
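/* Illustrative sketch, not part of the upstream patch: with asm goto
   support, the operand vectors filled in above are laid out as
   [outputs][inputs][labels], so their total length matches what
   asm_noperands now returns.  The helper below is hypothetical.  */

static int
asm_total_noperands (rtx asm_op, int n_outputs)
{
  return n_outputs
	 + ASM_OPERANDS_INPUT_LENGTH (asm_op)
	 + ASM_OPERANDS_LABEL_LENGTH (asm_op);
}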
@@ -1561,6 +1600,11 @@
   /* Use constrain_operands after reload.  */
   gcc_assert (!reload_completed);
 
+  /* Empty constraint string is the same as "X,...,X", i.e. X for as
+     many alternatives as required to match the other operands.  */
+  if (*constraint == '\0')
+    return 1;
+
   while (*constraint)
     {
       char c = *constraint;
@@ -1666,7 +1710,7 @@
 	  break;
 
 	case 's':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      || (GET_CODE (op) == CONST_DOUBLE
 		  && GET_MODE (op) == VOIDmode))
 	    break;
@@ -1678,49 +1722,49 @@
 	  break;
 
 	case 'n':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      || (GET_CODE (op) == CONST_DOUBLE
 		  && GET_MODE (op) == VOIDmode))
 	    result = 1;
 	  break;
 
 	case 'I':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
 	    result = 1;
 	  break;
 	case 'J':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
 	    result = 1;
 	  break;
 	case 'K':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
 	    result = 1;
 	  break;
 	case 'L':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
 	    result = 1;
 	  break;
 	case 'M':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
 	    result = 1;
 	  break;
 	case 'N':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
 	    result = 1;
 	  break;
 	case 'O':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
 	    result = 1;
 	  break;
 	case 'P':
-	  if (GET_CODE (op) == CONST_INT
+	  if (CONST_INT_P (op)
 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
 	    result = 1;
 	  break;
@@ -1829,7 +1873,8 @@
 offsettable_memref_p (rtx op)
 {
   return ((MEM_P (op))
-	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
+	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
+					       MEM_ADDR_SPACE (op)));
 }
 
 /* Similar, but don't require a strictly valid mem ref:
@@ -1839,12 +1884,13 @@
 offsettable_nonstrict_memref_p (rtx op)
 {
   return ((MEM_P (op))
-	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
+	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
+					       MEM_ADDR_SPACE (op)));
 }
 
 /* Return 1 if Y is a memory address which contains no side effects
-   and would remain valid after the addition of a positive integer
-   less than the size of that mode.
+   and would remain valid for address space AS after the addition of
+   a positive integer less than the size of that mode.
 
    We assume that the original address is valid and do not check it.
    We do check that it is valid for narrower modes.
@@ -1853,14 +1899,16 @@
    for the sake of use in reload.c.  */
 
 int
-offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
+offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
+				  addr_space_t as)
 {
   enum rtx_code ycode = GET_CODE (y);
   rtx z;
   rtx y1 = y;
   rtx *y2;
-  int (*addressp) (enum machine_mode, rtx) =
-    (strictp ? strict_memory_address_p : memory_address_p);
+  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
+    (strictp ? strict_memory_address_addr_space_p
+	     : memory_address_addr_space_p);
   unsigned int mode_sz = GET_MODE_SIZE (mode);
 
   if (CONSTANT_ADDRESS_P (y))
@@ -1890,7 +1938,7 @@
       *y2 = plus_constant (*y2, mode_sz - 1);
       /* Use QImode because an odd displacement may be automatically invalid
 	 for any wider mode.  But it should be valid for a single byte.  */
-      good = (*addressp) (QImode, y);
+      good = (*addressp) (QImode, y, as);
 
       /* In any case, restore old contents of memory.  */
       *y2 = y1;
@@ -1915,7 +1963,7 @@
 
   /* Use QImode because an odd displacement may be automatically invalid
      for any wider mode.  But it should be valid for a single byte.  */
-  return (*addressp) (QImode, z);
+  return (*addressp) (QImode, z, as);
 }
 
 /* Return 1 if ADDR is an address-expression whose effect depends
@@ -1998,6 +2046,7 @@
     case ASM_INPUT:
     case ADDR_VEC:
     case ADDR_DIFF_VEC:
+    case VAR_LOCATION:
       return;
 
     case SET:
@@ -2459,11 +2508,14 @@
 		if (MEM_P (op))
 		  {
 		    if (strict > 0
-			&& !strict_memory_address_p (GET_MODE (op),
-						     XEXP (op, 0)))
+			&& !strict_memory_address_addr_space_p
+			     (GET_MODE (op), XEXP (op, 0),
+			      MEM_ADDR_SPACE (op)))
 		      break;
 		    if (strict == 0
-			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
+			&& !memory_address_addr_space_p
+			     (GET_MODE (op), XEXP (op, 0),
+			      MEM_ADDR_SPACE (op)))
 		      break;
 		    win = 1;
 		  }
@@ -2506,7 +2558,7 @@
 		break;
 
 	      case 's':
-		if (GET_CODE (op) == CONST_INT
+		if (CONST_INT_P (op)
 		    || (GET_CODE (op) == CONST_DOUBLE
 			&& GET_MODE (op) == VOIDmode))
 		  break;
@@ -2516,7 +2568,7 @@
 		break;
 
 	      case 'n':
-		if (GET_CODE (op) == CONST_INT
+		if (CONST_INT_P (op)
 		    || (GET_CODE (op) == CONST_DOUBLE
 			&& GET_MODE (op) == VOIDmode))
 		  win = 1;
@@ -2530,7 +2582,7 @@
 	      case 'N':
 	      case 'O':
 	      case 'P':
-		if (GET_CODE (op) == CONST_INT
+		if (CONST_INT_P (op)
 		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
 		  win = 1;
 		break;
@@ -3026,6 +3078,26 @@
   return NULL_RTX;
 }
 
+/* Forget all currently tracked instructions, only remember current
+   LIVE regset.  */
+
+static void
+peep2_reinit_state (regset live)
+{
+  int i;
+
+  /* Indicate that all slots except the last holds invalid data.  */
+  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
+    peep2_insn_data[i].insn = NULL_RTX;
+  peep2_current_count = 0;
+
+  /* Indicate that the last slot contains live_after data.  */
+  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
+  peep2_current = MAX_INSNS_PER_PEEP2;
+
+  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
+}
+
 /* Perform the peephole2 optimization pass.  */
 
 static void
@@ -3049,24 +3121,16 @@
   FOR_EACH_BB_REVERSE (bb)
     {
       rtl_profile_for_bb (bb);
-      /* Indicate that all slots except the last holds invalid data.  */
-      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
-	peep2_insn_data[i].insn = NULL_RTX;
-      peep2_current_count = 0;
-
-      /* Indicate that the last slot contains live_after data.  */
-      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
-      peep2_current = MAX_INSNS_PER_PEEP2;
 
       /* Start up propagation.  */
       bitmap_copy (live, DF_LR_OUT (bb));
       df_simulate_initialize_backwards (bb, live);
-      bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
+      peep2_reinit_state (live);
 
       for (insn = BB_END (bb); ; insn = prev)
 	{
 	  prev = PREV_INSN (insn);
-	  if (INSN_P (insn))
+	  if (NONDEBUG_INSN_P (insn))
 	    {
 	      rtx attempt, before_try, x;
 	      int match_len;
@@ -3088,7 +3152,7 @@
 		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
 		     substitution would lose the
 		     REG_FRAME_RELATED_EXPR that is attached.  */
-		  peep2_current_count = 0;
+		  peep2_reinit_state (live);
 		  attempt = NULL;
 		}
 	      else
@@ -3177,77 +3241,79 @@
 			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
 			  break;
 
-		      for (x = attempt ; x != before_try ; x = PREV_INSN (x))
-			if (CALL_P (x)
-			    || (flag_non_call_exceptions
-				&& may_trap_p (PATTERN (x))
-				&& !find_reg_note (x, REG_EH_REGION, NULL)))
-			  {
-			    if (note)
-			      add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
-
-			    if (x != BB_END (bb) && eh_edge)
-			      {
-				edge nfte, nehe;
-				int flags;
-
-				nfte = split_block (bb, x);
-				flags = (eh_edge->flags
-					 & (EDGE_EH | EDGE_ABNORMAL));
-				if (CALL_P (x))
-				  flags |= EDGE_ABNORMAL_CALL;
-				nehe = make_edge (nfte->src, eh_edge->dest,
-						  flags);
-
-				nehe->probability = eh_edge->probability;
-				nfte->probability
-				  = REG_BR_PROB_BASE - nehe->probability;
-
-			        do_cleanup_cfg |= purge_dead_edges (nfte->dest);
-				bb = nfte->src;
-				eh_edge = nehe;
-			      }
-			  }
+		      if (note)
+			copy_reg_eh_region_note_backward (note, attempt,
+							  before_try);
+
+		      if (eh_edge)
+			for (x = attempt ; x != before_try ; x = PREV_INSN (x))
+			  if (x != BB_END (bb)
+			      && (can_throw_internal (x)
+				  || can_nonlocal_goto (x)))
+			    {
+			      edge nfte, nehe;
+			      int flags;
+
+			      nfte = split_block (bb, x);
+			      flags = (eh_edge->flags
+				       & (EDGE_EH | EDGE_ABNORMAL));
+			      if (CALL_P (x))
+				flags |= EDGE_ABNORMAL_CALL;
+			      nehe = make_edge (nfte->src, eh_edge->dest,
+						flags);
+
+			      nehe->probability = eh_edge->probability;
+			      nfte->probability
+				= REG_BR_PROB_BASE - nehe->probability;
+
+			      do_cleanup_cfg |= purge_dead_edges (nfte->dest);
+			      bb = nfte->src;
+			      eh_edge = nehe;
+			    }
 
 		      /* Converting possibly trapping insn to non-trapping is
 			 possible.  Zap dummy outgoing edges.  */
 		      do_cleanup_cfg |= purge_dead_edges (bb);
 		    }
 
-#ifdef HAVE_conditional_execution
-		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
-		    peep2_insn_data[i].insn = NULL_RTX;
-		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
-		  peep2_current_count = 0;
-#else
-		  /* Back up lifetime information past the end of the
-		     newly created sequence.  */
-		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
-		    i = 0;
-		  bitmap_copy (live, peep2_insn_data[i].live_before);
-
-		  /* Update life information for the new sequence.  */
-		  x = attempt;
-		  do
+		  if (targetm.have_conditional_execution ())
+		    {
+		      for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
+			peep2_insn_data[i].insn = NULL_RTX;
+		      peep2_insn_data[peep2_current].insn = PEEP2_EOB;
+		      peep2_current_count = 0;
+		    }
+		  else
 		    {
-		      if (INSN_P (x))
+		      /* Back up lifetime information past the end of the
+			 newly created sequence.  */
+		      if (++i >= MAX_INSNS_PER_PEEP2 + 1)
+			i = 0;
+		      bitmap_copy (live, peep2_insn_data[i].live_before);
+
+		      /* Update life information for the new sequence.  */
+		      x = attempt;
+		      do
 			{
-			  if (--i < 0)
-			    i = MAX_INSNS_PER_PEEP2;
-			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
-			      && peep2_insn_data[i].insn == NULL_RTX)
-			    peep2_current_count++;
-			  peep2_insn_data[i].insn = x;
-			  df_insn_rescan (x);
-			  df_simulate_one_insn_backwards (bb, x, live);
-			  bitmap_copy (peep2_insn_data[i].live_before, live);
+			  if (INSN_P (x))
+			    {
+			      if (--i < 0)
+				i = MAX_INSNS_PER_PEEP2;
+			      if (peep2_current_count < MAX_INSNS_PER_PEEP2
+				  && peep2_insn_data[i].insn == NULL_RTX)
+				peep2_current_count++;
+			      peep2_insn_data[i].insn = x;
+			      df_insn_rescan (x);
+			      df_simulate_one_insn_backwards (bb, x, live);
+			      bitmap_copy (peep2_insn_data[i].live_before,
+					   live);
+			    }
+			  x = PREV_INSN (x);
 			}
-		      x = PREV_INSN (x);
+		      while (x != prev);
+
+		      peep2_current = i;
 		    }
-		  while (x != prev);
-
-		  peep2_current = i;
-#endif
 
 		  /* If we generated a jump instruction, it won't have
 		     JUMP_LABEL set.  Recompute after we're done.  */
@@ -3474,7 +3540,7 @@
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
@@ -3504,7 +3570,7 @@
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
@@ -3548,7 +3614,7 @@
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
@@ -3586,7 +3652,7 @@
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
@@ -3605,7 +3671,7 @@
   return 1;
 #else
   return 0;
-#endif 
+#endif
 }
 
 struct rtl_opt_pass pass_split_for_shorten_branches =
@@ -3618,7 +3684,7 @@
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   0,                                    /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
@@ -3626,5 +3692,3 @@
   TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
  }
 };
-
-