diff gcc/config/rx/rx.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
--- a/gcc/config/rx/rx.c	Sun Aug 21 07:07:55 2011 +0900
+++ b/gcc/config/rx/rx.c	Fri Oct 27 22:46:09 2017 +0900
@@ -1,5 +1,5 @@
 /* Subroutines used for code generation on Renesas RX processors.
-   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+   Copyright (C) 2008-2017 Free Software Foundation, Inc.
    Contributed by Red Hat.
 
    This file is part of GCC.
@@ -25,45 +25,142 @@
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
+#include "rtl.h"
 #include "tree.h"
-#include "rtl.h"
+#include "stringpool.h"
+#include "attribs.h"
+#include "cfghooks.h"
+#include "df.h"
+#include "memmodel.h"
+#include "tm_p.h"
 #include "regs.h"
-#include "hard-reg-set.h"
-#include "insn-config.h"
-#include "conditions.h"
+#include "emit-rtl.h"
+#include "diagnostic-core.h"
+#include "varasm.h"
+#include "stor-layout.h"
+#include "calls.h"
 #include "output.h"
-#include "insn-attr.h"
 #include "flags.h"
-#include "function.h"
+#include "explow.h"
 #include "expr.h"
-#include "optabs.h"
-#include "libfuncs.h"
-#include "recog.h"
-#include "diagnostic-core.h"
 #include "toplev.h"
-#include "reload.h"
-#include "df.h"
-#include "ggc.h"
-#include "tm_p.h"
-#include "debug.h"
-#include "target.h"
+#include "langhooks.h"
+#include "opts.h"
+#include "builtins.h"
+
+/* This file should be included last.  */
 #include "target-def.h"
-#include "langhooks.h"
+
+static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
+static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
+static unsigned int rx_num_interrupt_regs;
 
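+/* Return the register assigned for small data (GP relative) addressing.
+   Aborts (via gcc_unreachable) if no register has been assigned yet.  */
+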
+static unsigned int
+rx_gp_base_regnum (void)
+{
+  if (rx_gp_base_regnum_val == INVALID_REGNUM)
+    gcc_unreachable ();
+  return rx_gp_base_regnum_val;
+}
+
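+/* Likewise, but for the PID (position independent data) base register.  */
+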
+static unsigned int
+rx_pid_base_regnum (void)
+{
+  if (rx_pid_base_regnum_val == INVALID_REGNUM)
+    gcc_unreachable ();
+  return rx_pid_base_regnum_val;
+}
+
+/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */
+
+static tree
+rx_decl_for_addr (rtx op)
+{
+  if (GET_CODE (op) == MEM)
+    op = XEXP (op, 0);
+  if (GET_CODE (op) == CONST)
+    op = XEXP (op, 0);
+  while (GET_CODE (op) == PLUS)
+    op = XEXP (op, 0);
+  if (GET_CODE (op) == SYMBOL_REF)
+    return SYMBOL_REF_DECL (op);
+  return NULL_TREE;
+}
+
 static void rx_print_operand (FILE *, rtx, int);
 
 #define CC_FLAG_S	(1 << 0)
 #define CC_FLAG_Z	(1 << 1)
 #define CC_FLAG_O	(1 << 2)
 #define CC_FLAG_C	(1 << 3)
-#define CC_FLAG_FP	(1 << 4)	/* fake, to differentiate CC_Fmode */
-
-static unsigned int flags_from_mode (enum machine_mode mode);
+#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */
+
+static unsigned int flags_from_mode (machine_mode mode);
 static unsigned int flags_from_code (enum rtx_code code);
-
-enum rx_cpu_types  rx_cpu_type = RX600;
 
+/* Return true if OP is a reference to an object in a PID data area.  */
+
+enum pid_type
+{
+  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
+  PID_ENCODED,		/* The object is in the PID data area.  */
+  PID_UNENCODED		/* The object will be placed in the PID data area, but it has not been placed there yet.  */
+};
+
+static enum pid_type
+rx_pid_data_operand (rtx op)
+{
+  tree op_decl;
+
+  if (!TARGET_PID)
+    return PID_NOT_PID;
+
+  if (GET_CODE (op) == PLUS
+      && GET_CODE (XEXP (op, 0)) == REG
+      && GET_CODE (XEXP (op, 1)) == CONST
+      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
+    return PID_ENCODED;
+
+  op_decl = rx_decl_for_addr (op);
+
+  if (op_decl)
+    {
+      if (TREE_READONLY (op_decl))
+	return PID_UNENCODED;
+    }
+  else
+    {
+      /* Sigh, some special cases.  */
+      if (GET_CODE (op) == SYMBOL_REF
+	  || GET_CODE (op) == LABEL_REF)
+	return PID_UNENCODED;
+    }
+
+  return PID_NOT_PID;
+}
+
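+/* Implement TARGET_LEGITIMIZE_ADDRESS.  References to PID data are
+   rewritten as pid_addr expressions based on the PID base register, and
+   addresses of the form (plus (plus reg x) reg) are forced into a
+   register.  */
+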
+static rtx
+rx_legitimize_address (rtx x,
+		       rtx oldx ATTRIBUTE_UNUSED,
+		       machine_mode mode ATTRIBUTE_UNUSED)
+{
+  if (rx_pid_data_operand (x) == PID_UNENCODED)
+    {
+      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
+      return rv;
+    }
+
+  if (GET_CODE (x) == PLUS
+      && GET_CODE (XEXP (x, 0)) == PLUS
+      && REG_P (XEXP (XEXP (x, 0), 0)) 
+      && REG_P (XEXP (x, 1)))
+    return force_reg (SImode, x);
+
+  return x;
+}
+
 /* Return true if OP is a reference to an object in a small data area.  */
 
 static bool
@@ -79,18 +176,31 @@
 }
 
 static bool
-rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
+rx_is_legitimate_address (machine_mode mode, rtx x,
+			  bool strict ATTRIBUTE_UNUSED)
 {
   if (RTX_OK_FOR_BASE (x, strict))
     /* Register Indirect.  */
     return true;
 
-  if (GET_MODE_SIZE (mode) == 4
+  if ((GET_MODE_SIZE (mode) == 4
+       || GET_MODE_SIZE (mode) == 2
+       || GET_MODE_SIZE (mode) == 1)
       && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
     /* Pre-decrement Register Indirect or
        Post-increment Register Indirect.  */
     return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
 
+  switch (rx_pid_data_operand (x))
+    {
+    case PID_UNENCODED:
+      return false;
+    case PID_ENCODED:
+      return true;
+    default:
+      break;
+    }
+
   if (GET_CODE (x) == PLUS)
     {
       rtx arg1 = XEXP (x, 0);
@@ -116,7 +226,7 @@
 
 	    if (val < 0)
 	      return false;
-	    
+
 	    switch (GET_MODE_SIZE (mode))
 	      {
 	      default: 
@@ -159,15 +269,13 @@
   return rx_small_data_operand (x);
 }
 
-/* Returns TRUE for simple memory addreses, ie ones
+/* Returns TRUE for simple memory addresses, ie ones
    that do not involve register indirect addressing
    or pre/post increment/decrement.  */
 
 bool
-rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
+rx_is_restricted_memory_address (rtx mem, machine_mode mode)
 {
-  rtx base, index;
-
   if (! rx_is_legitimate_address
       (mode, mem, reload_in_progress || reload_completed))
     return false;
@@ -183,11 +291,18 @@
       return false;
 
     case PLUS:
-      /* Only allow REG+INT addressing.  */
-      base = XEXP (mem, 0);
-      index = XEXP (mem, 1);
-
-      return RX_REG_P (base) && CONST_INT_P (index);
+      {
+	rtx base, index;
+	
+	/* Only allow REG+INT addressing.  */
+	base = XEXP (mem, 0);
+	index = XEXP (mem, 1);
+
+	if (! RX_REG_P (base) || ! CONST_INT_P (index))
+	  return false;
+
+	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
+      }
 
     case SYMBOL_REF:
       /* Can happen when small data is being supported.
@@ -202,7 +317,7 @@
 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */
 
 static bool
-rx_mode_dependent_address_p (const_rtx addr)
+rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
 {
   if (GET_CODE (addr) == CONST)
     addr = XEXP (addr, 0);
@@ -229,9 +344,9 @@
 
 	case CONST_INT:
 	  /* REG+INT is only mode independent if INT is a
-	     multiple of 4, positive and will fit into 8-bits.  */
+	     multiple of 4, positive and will fit into 16-bits.  */
 	  if (((INTVAL (addr) & 3) == 0)
-	      && IN_RANGE (INTVAL (addr), 4, 252))
+	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
 	    return false;
 	  return true;
 
@@ -240,8 +355,6 @@
 	  return true;
 
 	case MULT:
-	  gcc_assert (REG_P (XEXP (addr, 0)));
-	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
 	  /* REG+REG*SCALE is always mode dependent.  */
 	  return true;
 
@@ -269,7 +382,7 @@
    reference whose address is ADDR.  */
 
 static void
-rx_print_operand_address (FILE * file, rtx addr)
+rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
 {
   switch (GET_CODE (addr))
     {
@@ -330,15 +443,24 @@
 	{
 	  addr = XEXP (addr, 0);
 	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
-      
-	  addr = XVECEXP (addr, 0, 0);
+
+          addr = XVECEXP (addr, 0, 0);
 	  gcc_assert (CONST_INT_P (addr));
+	  fprintf (file, "#");
+	  output_addr_const (file, addr);
+	  break;
 	}
+      fprintf (file, "#");
+      output_addr_const (file, XEXP (addr, 0));
+      break;
+
+    case UNSPEC:
+      addr = XVECEXP (addr, 0, 0);
       /* Fall through.  */
     case LABEL_REF:
     case SYMBOL_REF:
       fprintf (file, "#");
-
+      /* Fall through.  */
     default:
       output_addr_const (file, addr);
       break;
@@ -348,7 +470,7 @@
 static void
 rx_print_integer (FILE * file, HOST_WIDE_INT val)
 {
-  if (IN_RANGE (val, -64, 64))
+  if (val < 64)
     fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
   else
     fprintf (file,
@@ -382,15 +504,30 @@
      %B  Print an integer comparison name.
      %C  Print a control register name.
      %F  Print a condition code flag name.
+     %G  Print the register used for small-data-area addressing.
      %H  Print high part of a DImode register, integer or address.
      %L  Print low part of a DImode register, integer or address.
      %N  Print the negation of the immediate value.
+     %P  Print the register used for PID addressing.
      %Q  If the operand is a MEM, then correctly generate
-         register indirect or register relative addressing.  */
+         register indirect or register relative addressing.
+     %R  Like %Q but for zero-extending loads.  */
 
 static void
 rx_print_operand (FILE * file, rtx op, int letter)
 {
+  bool unsigned_load = false;
+  bool print_hash = true;
+
+  if (letter == 'A'
+      && ((GET_CODE (op) == CONST
+	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
+	  || GET_CODE (op) == UNSPEC))
+    {
+      print_hash = false;
+      letter = 0;
+    }
+
   switch (letter)
     {
     case 'A':
@@ -416,7 +553,7 @@
     case 'B':
       {
 	enum rtx_code code = GET_CODE (op);
-	enum machine_mode mode = GET_MODE (XEXP (op, 0));
+	machine_mode mode = GET_MODE (XEXP (op, 0));
 	const char *ret;
 
 	if (mode == CC_Fmode)
@@ -450,6 +587,7 @@
 	else
 	  {
 	    unsigned int flags = flags_from_mode (mode);
+
 	    switch (code)
 	      {
 	      case LT:
@@ -495,17 +633,17 @@
       gcc_assert (CONST_INT_P (op));
       switch (INTVAL (op))
 	{
-	case 0:   fprintf (file, "psw"); break;
-	case 2:   fprintf (file, "usp"); break;
-	case 3:   fprintf (file, "fpsw"); break;
-	case 4:   fprintf (file, "cpen"); break;
-	case 8:   fprintf (file, "bpsw"); break;
-	case 9:   fprintf (file, "bpc"); break;
-	case 0xa: fprintf (file, "isp"); break;
-	case 0xb: fprintf (file, "fintv"); break;
-	case 0xc: fprintf (file, "intb"); break;
+	case CTRLREG_PSW:   fprintf (file, "psw"); break;
+	case CTRLREG_USP:   fprintf (file, "usp"); break;
+	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
+	case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
+	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
+	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
+	case CTRLREG_ISP:   fprintf (file, "isp"); break;
+	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
+	case CTRLREG_INTB:  fprintf (file, "intb"); break;
 	default:
-	  warning (0, "unreocgnized control register number: %d - using 'psw'",
+	  warning (0, "unrecognized control register number: %d - using 'psw'",
 		   (int) INTVAL (op));
 	  fprintf (file, "psw");
 	  break;
@@ -527,6 +665,10 @@
 	}
       break;
 
+    case 'G':
+      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
+      break;
+
     case 'H':
       switch (GET_CODE (op))
 	{
@@ -551,7 +693,7 @@
 	case MEM:
 	  if (! WORDS_BIG_ENDIAN)
 	    op = adjust_address (op, SImode, 4);
-	  output_address (XEXP (op, 0));
+	  output_address (GET_MODE (op), XEXP (op, 0));
 	  break;
 	default:
 	  gcc_unreachable ();
@@ -575,7 +717,7 @@
 	case MEM:
 	  if (WORDS_BIG_ENDIAN)
 	    op = adjust_address (op, SImode, 4);
-	  output_address (XEXP (op, 0));
+	  output_address (GET_MODE (op), XEXP (op, 0));
 	  break;
 	default:
 	  gcc_unreachable ();
@@ -588,10 +730,19 @@
       rx_print_integer (file, - INTVAL (op));
       break;
 
+    case 'P':
+      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
+      break;
+
+    case 'R':
+      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
+      unsigned_load = true;
+      /* Fall through.  */
     case 'Q':
       if (MEM_P (op))
 	{
 	  HOST_WIDE_INT offset;
+	  rtx mem = op;
 
 	  op = XEXP (op, 0);
 
@@ -626,22 +777,24 @@
 	  rx_print_operand (file, op, 0);
 	  fprintf (file, "].");
 
-	  switch (GET_MODE_SIZE (GET_MODE (op)))
+	  switch (GET_MODE_SIZE (GET_MODE (mem)))
 	    {
 	    case 1:
-	      gcc_assert (offset < 65535 * 1);
-	      fprintf (file, "B");
+	      gcc_assert (offset <= 65535 * 1);
+	      fprintf (file, unsigned_load ? "UB" : "B");
 	      break;
 	    case 2:
 	      gcc_assert (offset % 2 == 0);
-	      gcc_assert (offset < 65535 * 2);
-	      fprintf (file, "W");
+	      gcc_assert (offset <= 65535 * 2);
+	      fprintf (file, unsigned_load ? "UW" : "W");
+	      break;
+	    case 4:
+	      gcc_assert (offset % 4 == 0);
+	      gcc_assert (offset <= 65535 * 4);
+	      fprintf (file, "L");
 	      break;
 	    default:
-	      gcc_assert (offset % 4 == 0);
-	      gcc_assert (offset < 65535 * 4);
-	      fprintf (file, "L");
-	      break;
+	      gcc_unreachable ();
 	    }
 	  break;
 	}
@@ -649,6 +802,24 @@
       /* Fall through.  */
 
     default:
+      if (GET_CODE (op) == CONST
+	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
+	op = XEXP (op, 0);
+      else if (GET_CODE (op) == CONST
+	       && GET_CODE (XEXP (op, 0)) == PLUS
+	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
+	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
+	{
+	  if (print_hash)
+	    fprintf (file, "#");
+	  fprintf (file, "(");
+	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
+	  fprintf (file, " + ");
+	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
+	  fprintf (file, ")");
+	  return;
+	}
+
       switch (GET_CODE (op))
 	{
 	case MULT:
@@ -678,11 +849,11 @@
 	  }
 
 	case MEM:
-	  output_address (XEXP (op, 0));
+	  output_address (GET_MODE (op), XEXP (op, 0));
 	  break;
 
 	case PLUS:
-	  output_address (op);
+	  output_address (VOIDmode, op);
 	  break;
 
 	case REG:
@@ -699,25 +870,55 @@
 	case CONST_DOUBLE:
 	  {
 	    unsigned long val;
-	    REAL_VALUE_TYPE rv;
-
-	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
-	    REAL_VALUE_TO_TARGET_SINGLE (rv, val);
-	    fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
+
+	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
+	    if (print_hash)
+	      fprintf (file, "#");
+	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
 	    break;
 	  }
 
 	case CONST_INT:
-	  fprintf (file, "#");
+	  if (print_hash)
+	    fprintf (file, "#");
 	  rx_print_integer (file, INTVAL (op));
 	  break;
 
+	case UNSPEC:
+	  switch (XINT (op, 1))
+	    {
+	    case UNSPEC_PID_ADDR:
+	      {
+		rtx sym, add;
+
+		if (print_hash)
+		  fprintf (file, "#");
+		sym = XVECEXP (op, 0, 0);
+		add = NULL_RTX;
+		fprintf (file, "(");
+		if (GET_CODE (sym) == PLUS)
+		  {
+		    add = XEXP (sym, 1);
+		    sym = XEXP (sym, 0);
+		  }
+		output_addr_const (file, sym);
+		if (add != NULL_RTX)
+		  {
+		    fprintf (file, "+");
+		    output_addr_const (file, add);
+		  }
+		fprintf (file, "-__pid_base");
+		fprintf (file, ")");
+		return;
+	      }
+	    }
+	  /* Fall through.  */
+
+	case CONST:
 	case SYMBOL_REF:
-	case CONST:
 	case LABEL_REF:
 	case CODE_LABEL:
-	case UNSPEC:
-	  rx_print_operand_address (file, op);
+	  rx_print_operand_address (file, VOIDmode, op);
 	  break;
 
 	default:
@@ -727,6 +928,29 @@
     }
 }
 
+/* Maybe convert an operand into its PID format.  */
+
+rtx
+rx_maybe_pidify_operand (rtx op, int copy_to_reg)
+{
+  if (rx_pid_data_operand (op) == PID_UNENCODED)
+    {
+      if (GET_CODE (op) == MEM)
+	{
+	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
+	  op = replace_equiv_address (op, a);
+	}
+      else
+	{
+	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
+	}
+
+      if (copy_to_reg)
+	op = copy_to_mode_reg (GET_MODE (op), op);
+    }
+  return op;
+}
+
 /* Returns an assembler template for a move instruction.  */
 
 char *
@@ -742,23 +966,25 @@
   /* Decide which extension, if any, should be given to the move instruction.  */
   switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
     {
-    case QImode:
+    case E_QImode:
       /* The .B extension is not valid when
 	 loading an immediate into a register.  */
       if (! REG_P (dest) || ! CONST_INT_P (src))
 	extension = ".B";
       break;
-    case HImode:
+    case E_HImode:
       if (! REG_P (dest) || ! CONST_INT_P (src))
 	/* The .W extension is not valid when
 	   loading an immediate into a register.  */
 	extension = ".W";
       break;
-    case SFmode:
-    case SImode:
+    case E_DFmode:
+    case E_DImode:
+    case E_SFmode:
+    case E_SImode:
       extension = ".L";
       break;
-    case VOIDmode:
+    case E_VOIDmode:
       /* This mode is used by constants.  */
       break;
     default:
@@ -766,18 +992,45 @@
       gcc_unreachable ();
     }
 
-  if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
-    src_template = "%%gp(%A1)[r13]";
+  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
+    {
+      gcc_assert (GET_MODE (src) != DImode);
+      gcc_assert (GET_MODE (src) != DFmode);
+      
+      src_template = "(%A1 - __pid_base)[%P1]";
+    }
+  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
+    {
+      gcc_assert (GET_MODE (src) != DImode);
+      gcc_assert (GET_MODE (src) != DFmode);
+      
+      src_template = "%%gp(%A1)[%G1]";
+    }
   else
     src_template = "%1";
 
   if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
-    dst_template = "%%gp(%A0)[r13]";
+    {
+      gcc_assert (GET_MODE (dest) != DImode);
+      gcc_assert (GET_MODE (dest) != DFmode);
+      
+      dst_template = "%%gp(%A0)[%G0]";
+    }
   else
     dst_template = "%0";
 
-  sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
-	   extension, src_template, dst_template);
+  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
+    {
+      gcc_assert (! is_movu);
+
+      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
+	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
+      else
+	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
+    }
+  else
+    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
+	     extension, src_template, dst_template);
   return out_template;
 }
 
@@ -794,7 +1047,7 @@
    occupied by an argument of type TYPE and mode MODE.  */
 
 static unsigned int
-rx_function_arg_size (Mmode mode, const_tree type)
+rx_function_arg_size (machine_mode mode, const_tree type)
 {
   unsigned int num_bytes;
 
@@ -814,10 +1067,11 @@
    variable parameter list.  */
 
 static rtx
-rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
+rx_function_arg (cumulative_args_t cum, machine_mode mode,
+		 const_tree type, bool named)
 {
   unsigned int next_reg;
-  unsigned int bytes_so_far = *cum;
+  unsigned int bytes_so_far = *get_cumulative_args (cum);
   unsigned int size;
   unsigned int rounded_size;
 
@@ -851,17 +1105,30 @@
 }
 
 static void
-rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type,
-			 bool named ATTRIBUTE_UNUSED)
+rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
+			 const_tree type, bool named ATTRIBUTE_UNUSED)
 {
-  *cum += rx_function_arg_size (mode, type);
+  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
 }
 
 static unsigned int
-rx_function_arg_boundary (Mmode mode ATTRIBUTE_UNUSED,
+rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
 			  const_tree type ATTRIBUTE_UNUSED)
 {
-  return 32;
+  /* Older versions of the RX backend aligned all on-stack arguments
+     to 32-bits.  The RX C ABI however says that they should be
+     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
+  if (TARGET_GCC_ABI)
+    return STACK_BOUNDARY;
+
+  if (type)
+    {
+      if (DECL_P (type))
+	return DECL_ALIGN (type);
+      return TYPE_ALIGN (type);
+    }
+
+  return PARM_BOUNDARY;
 }
 
 /* Return an RTL describing where a function return value of type RET_TYPE
@@ -872,13 +1139,15 @@
 		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
 		   bool       outgoing ATTRIBUTE_UNUSED)
 {
-  enum machine_mode mode = TYPE_MODE (ret_type);
+  machine_mode mode = TYPE_MODE (ret_type);
 
   /* RX ABI specifies that small integer types are
      promoted to int when returned by a function.  */
   if (GET_MODE_SIZE (mode) > 0
       && GET_MODE_SIZE (mode) < 4
       && ! COMPLEX_MODE_P (mode)
+      && ! VECTOR_TYPE_P (ret_type)
+      && ! VECTOR_MODE_P (mode)
       )
     return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
     
@@ -888,9 +1157,9 @@
 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
    regard to function returns as does TARGET_FUNCTION_VALUE.  */
 
-static enum machine_mode
+static machine_mode
 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
-			  enum machine_mode mode,
+			  machine_mode mode,
 			  int * punsignedp ATTRIBUTE_UNUSED,
 			  const_tree funtype ATTRIBUTE_UNUSED,
 			  int for_return)
@@ -898,6 +1167,8 @@
   if (for_return != 1
       || GET_MODE_SIZE (mode) >= 4
       || COMPLEX_MODE_P (mode)
+      || VECTOR_MODE_P (mode)
+      || VECTOR_TYPE_P (type)
       || GET_MODE_SIZE (mode) < 1)
     return mode;
 
@@ -948,7 +1219,7 @@
 
 /* Returns true if the provided function has the "fast_interrupt" attribute.  */
 
-static inline bool
+bool
 is_fast_interrupt_func (const_tree decl)
 {
   return has_func_attr (decl, "fast_interrupt");
@@ -956,7 +1227,7 @@
 
 /* Returns true if the provided function has the "interrupt" attribute.  */
 
-static inline bool
+bool
 is_interrupt_func (const_tree decl)
 {
   return has_func_attr (decl, "interrupt");
@@ -977,8 +1248,21 @@
 {
   static bool using_fixed_regs = false;
 
+  if (TARGET_PID)
+    {
+      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
+      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
+    }
+
   if (rx_small_data_limit > 0)
-    fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
+    {
+      if (TARGET_PID)
+	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
+      else
+	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
+
+      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
+    }
 
   if (use_fixed_regs != using_fixed_regs)
     {
@@ -1018,6 +1302,41 @@
     }
 }
 
+struct decl_chain
+{
+  tree fndecl;
+  struct decl_chain * next;
+};
+
+/* Stack of decls for which we have issued warnings.  */
+static struct decl_chain * warned_decls = NULL;
+
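+/* Record FNDECL on the list of decls that have already been warned about.  */
+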
+static void
+add_warned_decl (tree fndecl)
+{
+  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
+
+  warned->fndecl = fndecl;
+  warned->next = warned_decls;
+  warned_decls = warned;
+}
+
+/* Returns TRUE if FNDECL is on our list of warned about decls.  */
+
+static bool
+already_warned (tree fndecl)
+{
+  struct decl_chain * warned;
+
+  for (warned = warned_decls;
+       warned != NULL;
+       warned = warned->next)
+    if (warned->fndecl == fndecl)
+      return true;
+
+  return false;
+}
+
 /* Perform any actions necessary before starting to compile FNDECL.
    For the RX we use this to make sure that we have the correct
    set of register masks selected.  If FNDECL is NULL then we are
@@ -1050,6 +1369,24 @@
       target_reinit ();
     }
 
+  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
+    {
+      /* We do not warn about the first fast interrupt routine that
+	 we see.  Instead we just push it onto the stack.  */
+      if (warned_decls == NULL)
+	add_warned_decl (fndecl);
+
+      /* Otherwise if this fast interrupt is one for which we have
+	 not already issued a warning, generate one and then push
+	 it onto the stack as well.  */
+      else if (! already_warned (fndecl))
+	{
+	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
+		   fndecl, warned_decls->fndecl);
+	  add_warned_decl (fndecl);
+	}
+    }
+
   rx_previous_fndecl = fndecl;
 }
 
@@ -1144,7 +1481,7 @@
 	      be used in (non-interrupt aware) routines called from this one.  */
 	   || (call_used_regs[reg]
 	       && is_interrupt_func (NULL_TREE)
-	       && ! current_function_is_leaf))
+	       && ! crtl->is_leaf))
 	  && (! call_used_regs[reg]
 	      /* Even call clobbered registered must
 		 be pushed inside interrupt handlers.  */
@@ -1201,8 +1538,12 @@
      has specified --fixed-<reg-name> on the command line and in such
      circumstances we do not want to touch the fixed registers at all.
 
+     Note also that the code in the prologue/epilogue handlers will
+     automatically merge multiple PUSHes of adjacent registers into a single
+     PUSHM.
+
      FIXME: Is it worth improving this heuristic ?  */
-  pushed_mask = (-1 << low) & ~(-1 << (high + 1));
+  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
   unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
 
   if ((fixed_reg && fixed_reg <= high)
@@ -1267,14 +1608,13 @@
   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
 
   XVECEXP (vector, 0, 0) =
-    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
+    gen_rtx_SET (stack_pointer_rtx,
 		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
 				GEN_INT ((count - 1) * UNITS_PER_WORD)));
 
   for (i = 0; i < count - 1; i++)
     XVECEXP (vector, 0, i + 1) =
-      gen_rtx_SET (VOIDmode,
-		   gen_rtx_MEM (SImode,
+      gen_rtx_SET (gen_rtx_MEM (SImode,
 				gen_rtx_MINUS (SImode, stack_pointer_rtx,
 					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
 		   gen_rtx_REG (SImode, high - i));
@@ -1309,7 +1649,7 @@
 
   /* rx_max_constant_size specifies the maximum number
      of bytes that can be used to hold a signed value.  */
-  return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
+  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
 		        ( 1 << (rx_max_constant_size * 8)));
 }
 
@@ -1342,14 +1682,25 @@
 	   as the dwarf2out code cannot be expected to grok
 	   our unspec.  */
 	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
-		      gen_rtx_SET (SImode, dest,
-				   gen_rtx_PLUS (SImode, src, val)));
+		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
       return;
     }
 
   if (is_frame_related)
     RTX_FRAME_RELATED_P (insn) = 1;
-  return;
+}
+
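+/* Push registers LOW..HIGH (inclusive) onto the stack, using a single
+   PUSH for one register or a PUSHM for a range, and mark the insn as
+   frame related.  */
+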
+static void
+push_regs (unsigned int high, unsigned int low)
+{
+  rtx insn;
+
+  if (low == high)
+    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
+  else
+    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
+				       gen_rx_store_vector (low, high)));
+  mark_frame_related (insn);
 }
 
 void
@@ -1361,7 +1712,6 @@
   unsigned int low;
   unsigned int high;
   unsigned int reg;
-  rtx insn;
 
   /* Naked functions use their own, programmer provided prologues.  */
   if (is_naked_func (NULL_TREE))
@@ -1369,6 +1719,9 @@
 
   rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
 
+  if (flag_stack_usage_info)
+    current_function_static_stack_size = frame_size + stack_size;
+
   /* If we use any of the callee-saved registers, save them now.  */
   if (mask)
     {
@@ -1376,20 +1729,25 @@
       for (reg = CC_REGNUM; reg --;)
 	if (mask & (1 << reg))
 	  {
-	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
-	    mark_frame_related (insn);
+	    low = high = reg;
+
+	    /* Look for a span of registers.
+	       Note - we do not have to worry about -Os and whether
+	       it is better to use a single, longer PUSHM as
+	       rx_get_stack_layout has already done that for us.  */
+	    while (reg-- > 0)
+	      if ((mask & (1 << reg)) == 0)
+		break;
+	      else
+		--low;
+
+	    push_regs (high, low);
+	    if (reg == (unsigned) -1)
+	      break;
 	  }
     }
   else if (low)
-    {
-      if (high == low)
-	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
-      else
-	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
-						    * UNITS_PER_WORD),
-					   gen_rx_store_vector (low, high)));
-      mark_frame_related (insn);
-    }
+    push_regs (high, low);
 
   if (MUST_SAVE_ACC_REGISTER)
     {
@@ -1469,14 +1827,72 @@
 		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
       else
 	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
-		      true);
+		      false /* False because the epilogue will use the FP not the SP.  */);
     }
 }
 
 static void
-rx_output_function_prologue (FILE * file,
-			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
+add_vector_labels (FILE *file, const char *aname)
 {
+  tree vec_attr;
+  tree val_attr;
+  const char *vname = "vect";
+  const char *s;
+  int vnum;
+
+  /* This node is for the vector/interrupt tag itself.  */
+  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
+  if (!vec_attr)
+    return;
+
+  /* Now point it at the first argument.  */
+  vec_attr = TREE_VALUE (vec_attr);
+
+  /* Iterate through the arguments.  */
+  while (vec_attr)
+    {
+      val_attr = TREE_VALUE (vec_attr);
+      switch (TREE_CODE (val_attr))
+	{
+	case STRING_CST:
+	  s = TREE_STRING_POINTER (val_attr);
+	  goto string_id_common;
+
+	case IDENTIFIER_NODE:
+	  s = IDENTIFIER_POINTER (val_attr);
+
+	string_id_common:
+	  if (strcmp (s, "$default") == 0)
+	    {
+	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
+	      fprintf (file, "$tableentry$default$%s:\n", vname);
+	    }
+	  else
+	    vname = s;
+	  break;
+
+	case INTEGER_CST:
+	  vnum = TREE_INT_CST_LOW (val_attr);
+
+	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
+	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
+	  break;
+
+	default:
+	  ;
+	}
+
+      vec_attr = TREE_CHAIN (vec_attr);
+    }
+
+}
+
+static void
+rx_output_function_prologue (FILE * file)
+{
+  add_vector_labels (file, "interrupt");
+  add_vector_labels (file, "vector");
+
   if (is_fast_interrupt_func (NULL_TREE))
     asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
 
@@ -1538,19 +1954,18 @@
   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
 
   XVECEXP (vector, 0, 0) =
-    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-		 plus_constant (stack_pointer_rtx, adjust));
+    gen_rtx_SET (stack_pointer_rtx,
+		 plus_constant (Pmode, stack_pointer_rtx, adjust));
 
   for (i = 0; i < count - 2; i++)
     XVECEXP (vector, 0, i + 1) =
-      gen_rtx_SET (VOIDmode,
-		   gen_rtx_REG (SImode, low + i),
+      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
 		   gen_rtx_MEM (SImode,
 				i == 0 ? stack_pointer_rtx
-				: plus_constant (stack_pointer_rtx,
+				: plus_constant (Pmode, stack_pointer_rtx,
 						 i * UNITS_PER_WORD)));
 
-  XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
+  XVECEXP (vector, 0, count - 1) = ret_rtx;
 
   return vector;
 }
@@ -1567,22 +1982,55 @@
   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
 
   XVECEXP (vector, 0, 0) =
-    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-		 plus_constant (stack_pointer_rtx,
+    gen_rtx_SET (stack_pointer_rtx,
+		 plus_constant (Pmode, stack_pointer_rtx,
 				(count - 1) * UNITS_PER_WORD));
 
   for (i = 0; i < count - 1; i++)
     XVECEXP (vector, 0, i + 1) =
-      gen_rtx_SET (VOIDmode,
-		   gen_rtx_REG (SImode, low + i),
+      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
 		   gen_rtx_MEM (SImode,
 				i == 0 ? stack_pointer_rtx
-				: plus_constant (stack_pointer_rtx,
+				: plus_constant (Pmode, stack_pointer_rtx,
 						 i * UNITS_PER_WORD)));
 
   return vector;
 }
-  
+
+/* Returns true if a simple return insn can be used.  */
+
+bool
+rx_can_use_simple_return (void)
+{
+  unsigned int low;
+  unsigned int high;
+  unsigned int frame_size;
+  unsigned int stack_size;
+  unsigned int register_mask;
+
+  if (is_naked_func (NULL_TREE)
+      || is_fast_interrupt_func (NULL_TREE)
+      || is_interrupt_func (NULL_TREE))
+    return false;
+
+  rx_get_stack_layout (& low, & high, & register_mask,
+		       & frame_size, & stack_size);
+
+  return (register_mask == 0
+	  && (frame_size + stack_size) == 0
+	  && low == 0);
+}
+
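+/* Pop registers LOW..HIGH (inclusive) from the stack, using a single
+   POP for one register or a POPM for a range.  */
+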
+static void
+pop_regs (unsigned int high, unsigned int low)
+{
+  if (high == low)
+    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
+  else
+    emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
+			       gen_rx_popm_vector (low, high)));
+}
+
 void
 rx_expand_epilogue (bool is_sibcall)
 {
@@ -1695,16 +2143,16 @@
 	{
 	  for (reg = 0; reg < CC_REGNUM; reg ++)
 	    if (register_mask & (1 << reg))
-	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
+	      {
+		low = high = reg;
+		while (register_mask & (1 << high))
+		  high ++;
+		pop_regs (high - 1, low);
+		reg = high;
+	      }
 	}
       else if (low)
-	{
-	  if (high == low)
-	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
-	  else
-	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
-				       gen_rx_popm_vector (low, high)));
-	}
+	pop_regs (high, low);
 
       if (is_fast_interrupt_func (NULL_TREE))
 	{
@@ -1799,7 +2247,7 @@
 rx_in_small_data (const_tree decl)
 {
   int size;
-  const_tree section;
+  const char * section;
 
   if (rx_small_data_limit == 0)
     return false;
@@ -1818,11 +2266,7 @@
 
   section = DECL_SECTION_NAME (decl);
   if (section)
-    {
-      const char * const name = TREE_STRING_POINTER (section);
-
-      return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
-    }
+    return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
 
   size = int_size_in_bytes (TREE_TYPE (decl));
 
@@ -1833,7 +2277,7 @@
    The only special thing we do here is to honor small data.  */
 
 static section *
-rx_select_rtx_section (enum machine_mode mode,
+rx_select_rtx_section (machine_mode mode,
 		       rtx x,
 		       unsigned HOST_WIDE_INT align)
 {
@@ -1911,11 +2355,22 @@
   RX_BUILTIN_max
 };
 
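+/* Decl nodes for the RX built-in functions, indexed by RX_BUILTIN_*.  */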
+static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
+
 static void
 rx_init_builtins (void)
 {
+#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
+   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
+   add_builtin_function ("__builtin_rx_" LC_NAME,			\
+			build_function_type_list (RET_TYPE##_type_node, \
+						  NULL_TREE),		\
+			RX_BUILTIN_##UC_NAME,				\
+			BUILT_IN_MD, NULL, NULL_TREE)
+
 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
-  add_builtin_function ("__builtin_rx_" LC_NAME,			\
+   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
+   add_builtin_function ("__builtin_rx_" LC_NAME,			\
 			build_function_type_list (RET_TYPE##_type_node, \
 						  ARG_TYPE##_type_node, \
 						  NULL_TREE),		\
@@ -1923,6 +2378,7 @@
 			BUILT_IN_MD, NULL, NULL_TREE)
 
 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
+  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
 			build_function_type_list (RET_TYPE##_type_node, \
 						  ARG_TYPE1##_type_node,\
@@ -1932,6 +2388,7 @@
 			BUILT_IN_MD, NULL, NULL_TREE)
 
 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
+  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
 			build_function_type_list (RET_TYPE##_type_node, \
 						  ARG_TYPE1##_type_node,\
@@ -1941,7 +2398,7 @@
 			RX_BUILTIN_##UC_NAME,				\
 			BUILT_IN_MD, NULL, NULL_TREE)
 
-  ADD_RX_BUILTIN1 (BRK,     "brk",     void,  void);
+  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
   ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
   ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
   ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
@@ -1949,18 +2406,29 @@
   ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
   ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
   ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
-  ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
-  ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
+  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
+  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
   ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
   ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
-  ADD_RX_BUILTIN1 (RMPA,    "rmpa",    void,  void);
+  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
   ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
   ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
   ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
   ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
   ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
   ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
-  ADD_RX_BUILTIN1 (WAIT,    "wait",    void,  void);
+  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
+}
+
+/* Return the RX builtin for CODE.  */
+
+static tree
+rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
+{
+  if (code >= RX_BUILTIN_max)
+    return error_mark_node;
+
+  return rx_builtins[code];
 }
 
 static rtx
@@ -2115,7 +2583,7 @@
 rx_expand_builtin (tree exp,
 		   rtx target,
 		   rtx subtarget ATTRIBUTE_UNUSED,
-		   enum machine_mode mode ATTRIBUTE_UNUSED,
+		   machine_mode mode ATTRIBUTE_UNUSED,
 		   int ignore ATTRIBUTE_UNUSED)
 {
   tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
@@ -2148,7 +2616,12 @@
 	(op, gen_mvtachi, true);
     case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
 	(op, gen_mvtaclo, true);
-    case RX_BUILTIN_RMPA:    emit_insn (gen_rmpa ()); return NULL_RTX;
+    case RX_BUILTIN_RMPA:
+      if (rx_allow_string_insns)
+	emit_insn (gen_rmpa ());
+      else
+	error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
+      return NULL_RTX;
     case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
     case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
     case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
@@ -2213,12 +2686,11 @@
 static tree
 rx_handle_func_attribute (tree * node,
 			  tree   name,
-			  tree   args,
+			  tree   args ATTRIBUTE_UNUSED,
 			  int    flags ATTRIBUTE_UNUSED,
 			  bool * no_add_attrs)
 {
   gcc_assert (DECL_P (* node));
-  gcc_assert (args == NULL_TREE);
 
   if (TREE_CODE (* node) != FUNCTION_DECL)
     {
@@ -2234,73 +2706,44 @@
   return NULL_TREE;
 }
 
+/* Check "vector" attribute.  */
+
+static tree
+rx_handle_vector_attribute (tree * node,
+			    tree   name,
+			    tree   args,
+			    int    flags ATTRIBUTE_UNUSED,
+			    bool * no_add_attrs)
+{
+  gcc_assert (DECL_P (* node));
+  gcc_assert (args != NULL_TREE);
+
+  if (TREE_CODE (* node) != FUNCTION_DECL)
+    {
+      warning (OPT_Wattributes, "%qE attribute only applies to functions",
+	       name);
+      * no_add_attrs = true;
+    }
+
+  return NULL_TREE;
+}
+
 /* Table of RX specific attributes.  */
 const struct attribute_spec rx_attribute_table[] =
 {
-  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler.  */
-  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
-  { "interrupt",      0, 0, true, false, false, rx_handle_func_attribute },
-  { "naked",          0, 0, true, false, false, rx_handle_func_attribute },
-  { NULL,             0, 0, false, false, false, NULL }
+  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
+     affects_type_identity.  */
+  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
+    false },
+  { "interrupt",      0, -1, true, false, false, rx_handle_func_attribute,
+    false },
+  { "naked",          0, 0, true, false, false, rx_handle_func_attribute,
+    false },
+  { "vector",         1, -1, true, false, false, rx_handle_vector_attribute,
+    false },
+  { NULL,             0, 0, false, false, false, NULL, false }
 };
 
-/* Extra processing for target specific command line options.  */
-
-static bool
-rx_handle_option (size_t code, const char *  arg ATTRIBUTE_UNUSED, int value)
-{
-  switch (code)
-    {
-    case OPT_mint_register_:
-      switch (value)
-	{
-	case 4:
-	  fixed_regs[10] = call_used_regs [10] = 1;
-	  /* Fall through.  */
-	case 3:
-	  fixed_regs[11] = call_used_regs [11] = 1;
-	  /* Fall through.  */
-	case 2:
-	  fixed_regs[12] = call_used_regs [12] = 1;
-	  /* Fall through.  */
-	case 1:
-	  fixed_regs[13] = call_used_regs [13] = 1;
-	  /* Fall through.  */
-	case 0:
-	  return true;
-	default:
-	  return false;
-	}
-      break;
-
-    case OPT_mmax_constant_size_:
-      /* Make sure that the -mmax-constant_size option is in range.  */
-      return value >= 0 && value <= 4;
-
-    case OPT_mcpu_:
-      if (strcasecmp (arg, "RX610") == 0)
-	rx_cpu_type = RX610;
-      else if (strcasecmp (arg, "RX200") == 0)
-	{
-	  target_flags |= MASK_NO_USE_FPU;
-	  rx_cpu_type = RX200;
-	}
-      else if (strcasecmp (arg, "RX600") != 0)
-	warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
-      break;
-      
-    case OPT_fpu:
-      if (rx_cpu_type == RX200)
-	error ("the RX200 cpu does not have FPU hardware");
-      break;
-
-    default:
-      break;
-    }
-
-  return true;
-}
-
 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
 
 static void
@@ -2331,20 +2774,61 @@
 static void
 rx_option_override (void)
 {
+  unsigned int i;
+  cl_deferred_option *opt;
+  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
+
+  if (v)
+    FOR_EACH_VEC_ELT (*v, i, opt)
+      {
+	switch (opt->opt_index)
+	  {
+	  case OPT_mint_register_:
+	    switch (opt->value)
+	      {
+	      case 4:
+		fixed_regs[10] = call_used_regs [10] = 1;
+		/* Fall through.  */
+	      case 3:
+		fixed_regs[11] = call_used_regs [11] = 1;
+		/* Fall through.  */
+	      case 2:
+		fixed_regs[12] = call_used_regs [12] = 1;
+		/* Fall through.  */
+	      case 1:
+		fixed_regs[13] = call_used_regs [13] = 1;
+		/* Fall through.  */
+	      case 0:
+		rx_num_interrupt_regs = opt->value;
+		break;
+	      default:
+		rx_num_interrupt_regs = 0;
+		/* Error message already given because rx_handle_option
+		  returned false.  */
+		break;
+	      }
+	    break;
+
+	  default:
+	    gcc_unreachable ();
+	  }
+      }
+
   /* This target defaults to strict volatile bitfields.  */
-  if (flag_strict_volatile_bitfields < 0)
+  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
     flag_strict_volatile_bitfields = 1;
 
   rx_override_options_after_change ();
+
+  /* These values are bytes, not log.  */
+  if (align_jumps == 0 && ! optimize_size)
+    align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
+  if (align_loops == 0 && ! optimize_size)
+    align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
+  if (align_labels == 0 && ! optimize_size)
+    align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
 }
 
-/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */
-static const struct default_options rx_option_optimization_table[] =
-  {
-    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
-    { OPT_LEVELS_NONE, 0, NULL, 0 }
-  };
-
 
 static bool
 rx_allocate_stack_slots_for_args (void)
@@ -2361,12 +2845,23 @@
     &&   ! is_naked_func (decl);  
 }
 
+static bool
+rx_warn_func_return (tree decl)
+{
+  /* Naked functions are implemented entirely in assembly, including the
+     return sequence, so suppress warnings about this.  */
+  return !is_naked_func (decl);
+}
+
 /* Return nonzero if it is ok to make a tail-call to DECL,
    a function_decl or NULL if this is an indirect call, using EXP  */
 
 static bool
 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
 {
+  if (TARGET_JSR)
+    return false;
+
   /* Do not allow indirect tailcalls.  The
      sibcall patterns do not support them.  */
   if (decl == NULL)
@@ -2391,7 +2886,7 @@
 static bool
 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
 {
-  /* The packed attribute overrides the MS behaviour.  */
+  /* The packed attribute overrides the MS behavior.  */
   return ! TYPE_PACKED (record_type);
 }
 
@@ -2399,7 +2894,7 @@
    operand on the RX.  X is already known to satisfy CONSTANT_P.  */
 
 bool
-rx_is_legitimate_constant (rtx x)
+rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
 {
   switch (GET_CODE (x))
     {
@@ -2424,12 +2919,11 @@
 	  return true;
 
 	case UNSPEC:
-	  return XINT (x, 1) == UNSPEC_CONST;
+	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
 
 	default:
 	  /* FIXME: Can this ever happen ?  */
-	  abort ();
-	  return false;
+	  gcc_unreachable ();
 	}
       break;
       
@@ -2449,7 +2943,8 @@
 }
 
 static int
-rx_address_cost (rtx addr, bool speed)
+rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
+		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
 {
   rtx a, b;
 
@@ -2570,27 +3065,29 @@
 }
 
 static int
-rx_memory_move_cost (enum machine_mode mode, reg_class_t regclass, bool in)
+rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
+		     reg_class_t regclass ATTRIBUTE_UNUSED,
+		     bool in)
 {
-  return 2 + memory_move_secondary_cost (mode, regclass, in);
+  return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
 }
 
 /* Convert a CC_MODE to the set of flags that it represents.  */
 
 static unsigned int
-flags_from_mode (enum machine_mode mode)
+flags_from_mode (machine_mode mode)
 {
   switch (mode)
     {
-    case CC_ZSmode:
+    case E_CC_ZSmode:
       return CC_FLAG_S | CC_FLAG_Z;
-    case CC_ZSOmode:
+    case E_CC_ZSOmode:
       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
-    case CC_ZSCmode:
+    case E_CC_ZSCmode:
       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
-    case CCmode:
+    case E_CCmode:
       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
-    case CC_Fmode:
+    case E_CC_Fmode:
       return CC_FLAG_FP;
     default:
       gcc_unreachable ();
@@ -2599,7 +3096,7 @@
 
 /* Convert a set of flags to a CC_MODE that can implement it.  */
 
-static enum machine_mode
+static machine_mode
 mode_from_flags (unsigned int f)
 {
   if (f & CC_FLAG_FP)
@@ -2647,8 +3144,8 @@
 
 /* Return a CC_MODE of which both M1 and M2 are subsets.  */
 
-static enum machine_mode
-rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+static machine_mode
+rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
 {
   unsigned f;
 
@@ -2667,7 +3164,7 @@
 
 /* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
 
-enum machine_mode
+machine_mode
 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
 {
   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
@@ -2683,33 +3180,33 @@
    CC_MODE, and use that in branches based on that compare.  */
 
 void
-rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
+rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
 		  rtx c1, rtx c2, rtx label)
 {
   rtx flags, x;
 
   flags = gen_rtx_REG (cc_mode, CC_REG);
   x = gen_rtx_COMPARE (cc_mode, c1, c2);
-  x = gen_rtx_SET (VOIDmode, flags, x);
+  x = gen_rtx_SET (flags, x);
   emit_insn (x);
 
   x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
-  x = gen_rtx_SET (VOIDmode, pc_rtx, x);
+  x = gen_rtx_SET (pc_rtx, x);
   emit_jump_insn (x);
 }
 
 /* A helper function for matching parallels that set the flags.  */
 
 bool
-rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
+rx_match_ccmode (rtx insn, machine_mode cc_mode)
 {
   rtx op1, flags;
-  enum machine_mode flags_mode;
+  machine_mode flags_mode;
 
   gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
 
-  op1 = XVECEXP (PATTERN (insn), 0, 1);
+  op1 = XVECEXP (PATTERN (insn), 0, 0);
   gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
 
   flags = SET_DEST (op1);
@@ -2726,8 +3223,259 @@
 
   return true;
 }
-
 
+int
+rx_align_for_label (rtx lab, int uses_threshold)
+{
+  /* This is a simple heuristic to guess when an alignment would not be useful
+     because the delay due to the inserted NOPs would be greater than the delay
+     due to the misaligned branch.  If uses_threshold is zero then the alignment
+     is always useful.  */
+  if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
+    return 0;
+
+  if (optimize_size)
+    return 0;
+  /* These values are log, not bytes.  */
+  if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
+    return 2; /* 4 bytes */
+  return 3;   /* 8 bytes */
+}
+
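+/* Return the maximum number of bytes that may be skipped when aligning
+   after LAB: one less than the length of the first real instruction that
+   follows it (when that length is under 8 bytes), or zero when optimizing
+   for size.  Used for the TARGET_ASM_*_ALIGN_MAX_SKIP hooks below.  */
+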
+static int
+rx_max_skip_for_label (rtx_insn *lab)
+{
+  int opsize;
+  rtx_insn *op;
+
+  if (optimize_size)
+    return 0;
+
+  if (lab == NULL)
+    return 0;
+
+  op = lab;
+  do
+    {
+      op = next_nonnote_nondebug_insn (op);
+    }
+  while (op && (LABEL_P (op)
+		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
+  if (!op)
+    return 0;
+
+  opsize = get_attr_length (op);
+  if (opsize >= 0 && opsize < 8)
+    return opsize - 1;
+  return 0;
+}
+
+/* Compute the real length of the extending load-and-op instructions.  */
+
+int
+rx_adjust_insn_length (rtx_insn *insn, int current_length)
+{
+  rtx extend, mem, offset;
+  bool zero;
+  int factor;
+
+  if (!INSN_P (insn))
+    return current_length;
+
+  switch (INSN_CODE (insn))
+    {
+    default:
+      return current_length;
+
+    case CODE_FOR_plussi3_zero_extendhi:
+    case CODE_FOR_andsi3_zero_extendhi:
+    case CODE_FOR_iorsi3_zero_extendhi:
+    case CODE_FOR_xorsi3_zero_extendhi:
+    case CODE_FOR_divsi3_zero_extendhi:
+    case CODE_FOR_udivsi3_zero_extendhi:
+    case CODE_FOR_minussi3_zero_extendhi:
+    case CODE_FOR_smaxsi3_zero_extendhi:
+    case CODE_FOR_sminsi3_zero_extendhi:
+    case CODE_FOR_multsi3_zero_extendhi:
+    case CODE_FOR_comparesi3_zero_extendhi:
+      zero = true;
+      factor = 2;
+      break;
+
+    case CODE_FOR_plussi3_sign_extendhi:
+    case CODE_FOR_andsi3_sign_extendhi:
+    case CODE_FOR_iorsi3_sign_extendhi:
+    case CODE_FOR_xorsi3_sign_extendhi:
+    case CODE_FOR_divsi3_sign_extendhi:
+    case CODE_FOR_udivsi3_sign_extendhi:
+    case CODE_FOR_minussi3_sign_extendhi:
+    case CODE_FOR_smaxsi3_sign_extendhi:
+    case CODE_FOR_sminsi3_sign_extendhi:
+    case CODE_FOR_multsi3_sign_extendhi:
+    case CODE_FOR_comparesi3_sign_extendhi:
+      zero = false;
+      factor = 2;
+      break;
+      
+    case CODE_FOR_plussi3_zero_extendqi:
+    case CODE_FOR_andsi3_zero_extendqi:
+    case CODE_FOR_iorsi3_zero_extendqi:
+    case CODE_FOR_xorsi3_zero_extendqi:
+    case CODE_FOR_divsi3_zero_extendqi:
+    case CODE_FOR_udivsi3_zero_extendqi:
+    case CODE_FOR_minussi3_zero_extendqi:
+    case CODE_FOR_smaxsi3_zero_extendqi:
+    case CODE_FOR_sminsi3_zero_extendqi:
+    case CODE_FOR_multsi3_zero_extendqi:
+    case CODE_FOR_comparesi3_zero_extendqi:
+      zero = true;
+      factor = 1;
+      break;
+      
+    case CODE_FOR_plussi3_sign_extendqi:
+    case CODE_FOR_andsi3_sign_extendqi:
+    case CODE_FOR_iorsi3_sign_extendqi:
+    case CODE_FOR_xorsi3_sign_extendqi:
+    case CODE_FOR_divsi3_sign_extendqi:
+    case CODE_FOR_udivsi3_sign_extendqi:
+    case CODE_FOR_minussi3_sign_extendqi:
+    case CODE_FOR_smaxsi3_sign_extendqi:
+    case CODE_FOR_sminsi3_sign_extendqi:
+    case CODE_FOR_multsi3_sign_extendqi:
+    case CODE_FOR_comparesi3_sign_extendqi:
+      zero = false;
+      factor = 1;
+      break;
+    }      
+
+  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
+  extend = single_set (insn);
+  gcc_assert (extend != NULL_RTX);
+
+  extend = SET_SRC (extend);
+  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
+      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
+    extend = XEXP (extend, 0);
+  else
+    extend = XEXP (extend, 1);
+
+  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
+	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
+    
+  mem = XEXP (extend, 0);
+  gcc_checking_assert (MEM_P (mem));
+  if (REG_P (XEXP (mem, 0)))
+    return (zero && factor == 1) ? 2 : 3;
+
+  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
+  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
+  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
+
+  offset = XEXP (XEXP (mem, 0), 1);
+  gcc_checking_assert (GET_CODE (offset) == CONST_INT);
+
+  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
+    return (zero && factor == 1) ? 3 : 4;
+
+  return (zero && factor == 1) ? 4 : 5;
+}
+
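+/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always use the narrowest
+   mode for volatile bit-field accesses.  */
+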
+static bool
+rx_narrow_volatile_bitfield (void)
+{
+  return true;
+}
+
+static bool
+rx_ok_to_inline (tree caller, tree callee)
+{
+  /* Do not inline functions with local variables
+     into a naked CALLER - naked functions have no stack frame and
+     locals need a frame in order to have somewhere to live.
+
+     Unfortunately we have no way to determine the presence of
+     local variables in CALLEE, so we have to be cautious and
+     assume that there might be some there.
+
+     We do allow inlining when CALLEE has the "inline" type
+     modifier or the "always_inline" or "gnu_inline" attributes.  */
+  return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
+    || DECL_DECLARED_INLINE_P (callee)
+    || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
+    || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
+}
+
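+/* Implement TARGET_LRA_P.  Enable LRA only when TARGET_ENABLE_LRA is set.  */
+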
+static bool
+rx_enable_lra (void)
+{
+  return TARGET_ENABLE_LRA;
+}
+
+rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
+{
+  if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
+    {
+      /* If we are inside an interrupt handler, assume that interrupts are
+	 off -- which is the default hardware behavior.  In this case, there
+	 is no need to disable the interrupts.  */
+      m_prev_psw_reg = NULL;
+    }
+  else
+    {
+      m_prev_psw_reg = gen_reg_rtx (SImode);
+      emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
+      emit_insn (gen_clrpsw (GEN_INT ('I')));
+    }
+}
+
+rx_atomic_sequence::~rx_atomic_sequence (void)
+{
+  if (m_prev_psw_reg != NULL)
+    emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
+}
+
+/* Implement TARGET_HARD_REGNO_NREGS.  */
+
+static unsigned int
+rx_hard_regno_nregs (unsigned int, machine_mode mode)
+{
+  return CLASS_MAX_NREGS (0, mode);
+}
+
+/* Implement TARGET_HARD_REGNO_MODE_OK.  */
+
+static bool
+rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
+{
+  return REGNO_REG_CLASS (regno) == GR_REGS;
+}
+
+/* Implement TARGET_MODES_TIEABLE_P.  */
+
+static bool
+rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
+{
+  return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
+	   || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
+	  == (GET_MODE_CLASS (mode2) == MODE_FLOAT
+	      || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
+}
+
+#undef  TARGET_NARROW_VOLATILE_BITFIELD
+#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield
+
+#undef  TARGET_CAN_INLINE_P
+#define TARGET_CAN_INLINE_P			rx_ok_to_inline
+
+#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
+#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
+#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
+#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
+#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
+#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
+#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
+#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label
+
 #undef  TARGET_FUNCTION_VALUE
 #define TARGET_FUNCTION_VALUE		rx_function_value
 
@@ -2752,6 +3500,9 @@
 #undef  TARGET_INIT_BUILTINS
 #define TARGET_INIT_BUILTINS		rx_init_builtins
 
+#undef  TARGET_BUILTIN_DECL
+#define TARGET_BUILTIN_DECL		rx_builtin_decl
+
 #undef  TARGET_EXPAND_BUILTIN
 #define TARGET_EXPAND_BUILTIN		rx_expand_builtin
 
@@ -2803,9 +3554,6 @@
 #undef  TARGET_SET_CURRENT_FUNCTION
 #define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function
 
-#undef  TARGET_HANDLE_OPTION
-#define TARGET_HANDLE_OPTION			rx_handle_option
-
 #undef  TARGET_ASM_INTEGER
 #define TARGET_ASM_INTEGER			rx_assemble_integer
 
@@ -2845,21 +3593,35 @@
 #undef  TARGET_OPTION_OVERRIDE
 #define TARGET_OPTION_OVERRIDE			rx_option_override
 
-#undef  TARGET_OPTION_OPTIMIZATION_TABLE
-#define TARGET_OPTION_OPTIMIZATION_TABLE	rx_option_optimization_table
-
 #undef  TARGET_PROMOTE_FUNCTION_MODE
 #define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode
 
 #undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change
 
-#undef  TARGET_EXCEPT_UNWIND_INFO
-#define TARGET_EXCEPT_UNWIND_INFO		sjlj_except_unwind_info
-
 #undef  TARGET_FLAGS_REGNUM
 #define TARGET_FLAGS_REGNUM			CC_REG
 
+#undef  TARGET_LEGITIMATE_CONSTANT_P
+#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant
+
+#undef  TARGET_LEGITIMIZE_ADDRESS
+#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address
+
+#undef  TARGET_WARN_FUNC_RETURN
+#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return
+
+#undef  TARGET_LRA_P
+#define TARGET_LRA_P 				rx_enable_lra
+
+#undef  TARGET_HARD_REGNO_NREGS
+#define TARGET_HARD_REGNO_NREGS			rx_hard_regno_nregs
+#undef  TARGET_HARD_REGNO_MODE_OK
+#define TARGET_HARD_REGNO_MODE_OK		rx_hard_regno_mode_ok
+
+#undef  TARGET_MODES_TIEABLE_P
+#define TARGET_MODES_TIEABLE_P			rx_modes_tieable_p
+
 struct gcc_target targetm = TARGET_INITIALIZER;
 
-/* #include "gt-rx.h" */
+#include "gt-rx.h"