diff gcc/config/v850/v850.md @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 77e2b8dfacca
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gcc/config/v850/v850.md	Fri Jul 17 14:47:48 2009 +0900
@@ -0,0 +1,1970 @@
+;; GCC machine description for NEC V850
+;; Copyright (C) 1996, 1997, 1998, 1999, 2002, 2004, 2005, 2007, 2008
+;; Free Software Foundation, Inc.
+;; Contributed by Jeff Law (law@cygnus.com).
+
+;; This file is part of GCC.
+
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3.  If not see
+;; <http://www.gnu.org/licenses/>.
+
+;; The original PO technology requires these to be ordered by speed,
+;; so that the assigner will pick the fastest.
+
+;; See file "rtl.def" for documentation on define_insn, match_*, et al.
+
+;; The V851 manual states that the instruction address space is 16M;
+;; the various branch/call instructions only have a 22-bit offset (4M range).
+;;
+;; One day we'll probably need to handle calls to targets more than 4M
+;; away.
+
+;; The size of instructions in bytes.
+
+(define_attr "length" ""
+  (const_int 4))
+
+(define_attr "long_calls" "yes,no"
+  (const (if_then_else (symbol_ref "TARGET_LONG_CALLS")
+		       (const_string "yes")
+		       (const_string "no"))))
+	    
+;; Types of instructions (for scheduling purposes).
+
+(define_attr "type" "load,mult,other"
+  (const_string "other"))
+
+;; Condition code settings.
+;; none - insn does not affect cc
+;; none_0hit - insn does not affect cc but it does modify operand 0
+;;	This attribute is used to keep track of when operand 0 changes.
+;; 	See the description of NOTICE_UPDATE_CC for more info.
+;; set_znv - sets z,n,v to usable values; c is unknown.
+;; set_zn  - sets z,n to usable values; v,c are unknown.
+;; compare - compare instruction
+;; clobber - value of cc is unknown
+(define_attr "cc" "none,none_0hit,set_zn,set_znv,compare,clobber"
+  (const_string "clobber"))
+
+;; Function units for the V850.  As best as I can tell, there's
+;; a traditional memory load/use stall as well as a stall if
+;; the result of a multiply is used too early.
+
+(define_insn_reservation "v850_other" 1
+			 (eq_attr "type" "other")
+			 "nothing")
+(define_insn_reservation "v850_mult" 2
+			 (eq_attr "type" "mult")
+			 "nothing")
+(define_insn_reservation "v850_memory" 2
+			 (eq_attr "type" "load")
+			 "nothing")
+
+(include "predicates.md")
+
+;; ----------------------------------------------------------------------
+;; MOVE INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+;; movqi
+
+(define_expand "movqi"
+  [(set (match_operand:QI 0 "general_operand" "")
+	(match_operand:QI 1 "general_operand" ""))]
+  ""
+  "
+{
+  /* One of the ops has to be in a register or 0.  */
+  if (!register_operand (operand0, QImode)
+      && !reg_or_0_operand (operand1, QImode))
+    operands[1] = copy_to_mode_reg (QImode, operand1);
+}")
+
+(define_insn "*movqi_internal"
+  [(set (match_operand:QI 0 "general_operand" "=r,r,r,Q,r,m,m")
+	(match_operand:QI 1 "general_operand" "Jr,n,Q,Ir,m,r,I"))]
+  "register_operand (operands[0], QImode)
+   || reg_or_0_operand (operands[1], QImode)"
+  "* return output_move_single (operands);"
+  [(set_attr "length" "2,4,2,2,4,4,4")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit")
+   (set_attr "type" "other,other,load,other,load,other,other")])
+
+;; movhi
+
+(define_expand "movhi"
+  [(set (match_operand:HI 0 "general_operand" "")
+	(match_operand:HI 1 "general_operand" ""))]
+  ""
+  "
+{
+  /* One of the ops has to be in a register or 0.  */
+  if (!register_operand (operand0, HImode)
+      && !reg_or_0_operand (operand1, HImode))
+    operands[1] = copy_to_mode_reg (HImode, operand1);
+}")
+
+(define_insn "*movhi_internal"
+  [(set (match_operand:HI 0 "general_operand" "=r,r,r,Q,r,m,m")
+	(match_operand:HI 1 "general_operand" "Jr,n,Q,Ir,m,r,I"))]
+  "register_operand (operands[0], HImode)
+   || reg_or_0_operand (operands[1], HImode)"
+  "* return output_move_single (operands);"
+  [(set_attr "length" "2,4,2,2,4,4,4")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit")
+   (set_attr "type" "other,other,load,other,load,other,other")])
+
+;; movsi and helpers
+
+(define_insn "*movsi_high"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(high:SI (match_operand 1 "" "")))]
+  ""
+  "movhi hi(%1),%.,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")
+   (set_attr "type" "other")])
+
+(define_insn "*movsi_lo"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(lo_sum:SI (match_operand:SI 1 "register_operand" "r")
+		   (match_operand:SI 2 "immediate_operand" "i")))]
+  ""
+  "movea lo(%2),%1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")
+   (set_attr "type" "other")])
+
+(define_expand "movsi"
+  [(set (match_operand:SI 0 "general_operand" "")
+	(match_operand:SI 1 "general_operand" ""))]
+  ""
+  "
+{
+  /* One of the ops has to be in a register or 0.  */
+  if (!register_operand (operand0, SImode)
+      && !reg_or_0_operand (operand1, SImode))
+    operands[1] = copy_to_mode_reg (SImode, operand1);
+
+  /* Some constants, as well as symbolic operands
+     must be done with HIGH & LO_SUM patterns.  */
+  if (CONSTANT_P (operands[1])
+      && GET_CODE (operands[1]) != HIGH
+      && ! TARGET_V850E
+      && !special_symbolref_operand (operands[1], VOIDmode)
+      && !(GET_CODE (operands[1]) == CONST_INT
+	   && (CONST_OK_FOR_J (INTVAL (operands[1]))
+	       || CONST_OK_FOR_K (INTVAL (operands[1]))
+	       || CONST_OK_FOR_L (INTVAL (operands[1])))))
+    {
+      rtx temp;
+
+      if (reload_in_progress || reload_completed)
+        temp = operands[0];
+      else
+	temp = gen_reg_rtx (SImode);
+
+      emit_insn (gen_rtx_SET (SImode, temp,
+			      gen_rtx_HIGH (SImode, operand1)));
+      emit_insn (gen_rtx_SET (SImode, operand0,
+			      gen_rtx_LO_SUM (SImode, temp, operand1)));
+      DONE;
+    }
+}")
+
+;; This is the same as the following pattern, except that it includes
+;; support for arbitrary 32-bit immediates.
+
+;; ??? This always loads addresses using hilo.  If the only use of this address
+;; was in a load/store, then we would get smaller code if we only loaded the
+;; upper part with hi, and then put the lower part in the load/store insn.
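+;;
+;; An illustrative sketch of the difference (the symbol `_x' and the register
+;; numbers are made up for the example, not something this port guarantees to
+;; emit).  Loading a word from `_x' through the hilo path costs three insns:
+;;
+;;	movhi hi(_x),r0,r10
+;;	movea lo(_x),r10,r10
+;;	ld.w  0[r10],r11
+;;
+;; whereas folding the low part into the load's displacement would need two:
+;;
+;;	movhi hi(_x),r0,r10
+;;	ld.w  lo(_x)[r10],r11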
+
+(define_insn "*movsi_internal_v850e"
+  [(set (match_operand:SI 0 "general_operand" "=r,r,r,r,Q,r,r,m,m,r")
+	(match_operand:SI 1 "general_operand" "Jr,K,L,Q,Ir,m,R,r,I,i"))]
+  "TARGET_V850E
+   && (register_operand (operands[0], SImode)
+       || reg_or_0_operand (operands[1], SImode))"
+  "* return output_move_single (operands);"
+  [(set_attr "length" "2,4,4,2,2,4,4,4,4,6")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit")
+   (set_attr "type" "other,other,other,load,other,load,other,other,other,other")])
+
+(define_insn "*movsi_internal"
+  [(set (match_operand:SI 0 "general_operand" "=r,r,r,r,Q,r,r,m,m")
+	(match_operand:SI 1 "movsi_source_operand" "Jr,K,L,Q,Ir,m,R,r,I"))]
+  "register_operand (operands[0], SImode)
+   || reg_or_0_operand (operands[1], SImode)"
+  "* return output_move_single (operands);"
+  [(set_attr "length" "2,4,4,2,2,4,4,4,4")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit")
+   (set_attr "type" "other,other,other,load,other,load,other,other,other")])
+
+(define_insn "*movsf_internal"
+  [(set (match_operand:SF 0 "general_operand" "=r,r,r,r,r,Q,r,m,m,r")
+	(match_operand:SF 1 "general_operand" "Jr,K,L,n,Q,Ir,m,r,IG,iF"))]
+  "register_operand (operands[0], SFmode)
+   || reg_or_0_operand (operands[1], SFmode)"
+  "* return output_move_single (operands);"
+  [(set_attr "length" "2,4,4,8,2,2,4,4,4,8")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit,none_0hit")
+   (set_attr "type" "other,other,other,other,load,other,load,other,other,other")])
+
+
+;; ----------------------------------------------------------------------
+;; TEST INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "*v850_tst1"
+  [(set (cc0) (zero_extract:SI (match_operand:QI 0 "memory_operand" "m")
+                               (const_int 1)
+                               (match_operand:QI 1 "const_int_operand" "n")))]
+  ""
+  "tst1 %1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+;; This replaces ld.b;sar;andi with tst1;setf nz.
+
+;; ??? The zero_extract sets the Z bit to the opposite of what one would
+;; expect.  This perhaps should be wrapped in a (eq: X (const_int 0)).
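+;;
+;; A rough sketch of the transformation (the register numbers and bit
+;; position are invented for illustration).  Extracting bit 3 of the byte at
+;; 0[r11], i.e.
+;;
+;;	ld.b 0[r11],r10
+;;	sar  3,r10
+;;	andi 1,r10,r10
+;;
+;; becomes, after the split below,
+;;
+;;	tst1 3,0[r11]
+;;	setf nz,r10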
+
+(define_split
+  [(set (match_operand:SI 0 "register_operand" "")
+	(zero_extract:SI (match_operand:QI 1 "memory_operand" "")
+			 (const_int 1)
+			 (match_operand 2 "const_int_operand" "")))]
+  ""
+  [(set (cc0) (zero_extract:SI (match_dup 1)
+			       (const_int 1)
+			       (match_dup 2)))
+   (set (match_dup 0) (ne:SI (cc0) (const_int 0)))])
+
+(define_insn "tstsi"
+  [(set (cc0) (match_operand:SI 0 "register_operand" "r"))]
+  ""
+  "cmp %.,%0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_znv")])
+
+(define_insn "cmpsi"
+  [(set (cc0)
+	(compare (match_operand:SI 0 "register_operand" "r,r")
+		 (match_operand:SI 1 "reg_or_int5_operand" "r,J")))]
+  ""
+  "@
+  cmp %1,%0
+  cmp %1,%0"
+  [(set_attr "length" "2,2")
+   (set_attr "cc" "compare")])
+
+;; ----------------------------------------------------------------------
+;; ADD INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "addsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(plus:SI (match_operand:SI 1 "register_operand" "%0,r,r")
+		 (match_operand:SI 2 "nonmemory_operand" "rJ,K,U")))]
+  ""
+  "@
+   add %2,%0
+   addi %2,%1,%0
+   addi %O2(%P2),%1,%0"
+  [(set_attr "length" "2,4,4")
+   (set_attr "cc" "set_zn,set_zn,set_zn")])
+
+;; ----------------------------------------------------------------------
+;; SUBTRACT INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "subsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+	(minus:SI (match_operand:SI 1 "register_operand" "0,r")
+		  (match_operand:SI 2 "register_operand" "r,0")))]
+  ""
+  "@
+  sub %2,%0
+  subr %1,%0"
+  [(set_attr "length" "2,2")
+   (set_attr "cc" "set_zn")])
+
+(define_insn "negsi2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (neg:SI (match_operand:SI 1 "register_operand" "0")))]
+  ""
+  "subr %.,%0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_zn")])
+
+;; ----------------------------------------------------------------------
+;; MULTIPLY INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_expand "mulhisi3"
+  [(set (match_operand:SI 0 "register_operand" "")
+	(mult:SI
+	  (sign_extend:SI (match_operand:HI 1 "register_operand" ""))
+	  (sign_extend:SI (match_operand:HI 2 "nonmemory_operand" ""))))]
+  ""
+  "if (GET_CODE (operands[2]) == CONST_INT)
+     {
+       emit_insn (gen_mulhisi3_internal2 (operands[0], operands[1], operands[2]));
+       DONE;
+     }")
+
+(define_insn "*mulhisi3_internal1"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(mult:SI
+	  (sign_extend:SI (match_operand:HI 1 "register_operand" "%0"))
+	  (sign_extend:SI (match_operand:HI 2 "register_operand" "r"))))]
+  ""
+  "mulh %2,%0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none_0hit")
+   (set_attr "type" "mult")])
+
+(define_insn "mulhisi3_internal2"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+	(mult:SI
+	  (sign_extend:SI (match_operand:HI 1 "register_operand" "%0,r"))
+	  (match_operand:HI 2 "const_int_operand" "J,K")))]
+  ""
+  "@
+   mulh %2,%0
+   mulhi %2,%1,%0"
+  [(set_attr "length" "2,4")
+   (set_attr "cc" "none_0hit,none_0hit")
+   (set_attr "type" "mult")])
+
+;; ??? The scheduling info is probably wrong.
+
+;; ??? This instruction can also generate the 32-bit highpart, but using it
+;; may increase code size, counter to the desired result.
+
+;; ??? This instruction can also give a DImode result.
+
+;; ??? There is an unsigned version, but it matters only for the DImode/highpart
+;; results.
+
+(define_insn "mulsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(mult:SI (match_operand:SI 1 "register_operand" "%0")
+		 (match_operand:SI 2 "reg_or_int9_operand" "rO")))]
+  "TARGET_V850E"
+  "mul %2,%1,%."
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")
+   (set_attr "type" "mult")])
+
+;; ----------------------------------------------------------------------
+;; DIVIDE INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+;; ??? These insns do set the Z/N condition codes, except that they are based
+;; on only one of the two results, so it doesn't seem to make sense to use
+;; them.
+
+;; ??? The scheduling info is probably wrong.
+
+(define_insn "divmodsi4"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(div:SI (match_operand:SI 1 "register_operand" "0")
+		(match_operand:SI 2 "register_operand" "r")))
+   (set (match_operand:SI 3 "register_operand" "=r")
+	(mod:SI (match_dup 1)
+		(match_dup 2)))]
+  "TARGET_V850E"
+  "div %2,%0,%3"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")
+   (set_attr "type" "other")])
+	
+(define_insn "udivmodsi4"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(udiv:SI (match_operand:SI 1 "register_operand" "0")
+		 (match_operand:SI 2 "register_operand" "r")))
+   (set (match_operand:SI 3 "register_operand" "=r")
+	(umod:SI (match_dup 1)
+		 (match_dup 2)))]
+  "TARGET_V850E"
+  "divu %2,%0,%3"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")
+   (set_attr "type" "other")])
+	
+;; ??? There is a 2 byte instruction for generating only the quotient.
+;; However, it isn't clear how to compute the length field correctly.
+
+(define_insn "divmodhi4"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(div:HI (match_operand:HI 1 "register_operand" "0")
+		(match_operand:HI 2 "register_operand" "r")))
+   (set (match_operand:HI 3 "register_operand" "=r")
+	(mod:HI (match_dup 1)
+		(match_dup 2)))]
+  "TARGET_V850E"
+  "divh %2,%0,%3"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")
+   (set_attr "type" "other")])
+
+;; Half-words are sign-extended by default, so we must zero extend to a word
+;; here before doing the divide.
+
+(define_insn "udivmodhi4"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(udiv:HI (match_operand:HI 1 "register_operand" "0")
+		 (match_operand:HI 2 "register_operand" "r")))
+   (set (match_operand:HI 3 "register_operand" "=r")
+	(umod:HI (match_dup 1)
+		 (match_dup 2)))]
+  "TARGET_V850E"
+  "zxh %0 ; divhu %2,%0,%3"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")
+   (set_attr "type" "other")])
+
+;; ----------------------------------------------------------------------
+;; AND INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "*v850_clr1_1"
+  [(set (match_operand:QI 0 "memory_operand" "=m")
+	(subreg:QI
+	  (and:SI (subreg:SI (match_dup 0) 0)
+		  (match_operand:QI 1 "not_power_of_two_operand" "")) 0))]
+  ""
+  "*
+{
+  rtx xoperands[2];
+  xoperands[0] = operands[0];
+  xoperands[1] = GEN_INT (~INTVAL (operands[1]) & 0xff);
+  output_asm_insn (\"clr1 %M1,%0\", xoperands);
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_clr1_2"
+  [(set (match_operand:HI 0 "indirect_operand" "=m")
+	(subreg:HI
+	  (and:SI (subreg:SI (match_dup 0) 0)
+		  (match_operand:HI 1 "not_power_of_two_operand" "")) 0))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (~INTVAL (operands[1]) & 0xffff);
+
+  rtx xoperands[2];
+  xoperands[0] = gen_rtx_MEM (QImode,
+			      plus_constant (XEXP (operands[0], 0), log2 / 8));
+  xoperands[1] = GEN_INT (log2 % 8);
+  output_asm_insn (\"clr1 %1,%0\", xoperands);
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_clr1_3"
+  [(set (match_operand:SI 0 "indirect_operand" "=m")
+	(and:SI (match_dup 0)
+		(match_operand:SI 1 "not_power_of_two_operand" "")))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (~INTVAL (operands[1]) & 0xffffffff);
+
+  rtx xoperands[2];
+  xoperands[0] = gen_rtx_MEM (QImode,
+			      plus_constant (XEXP (operands[0], 0), log2 / 8));
+  xoperands[1] = GEN_INT (log2 % 8);
+  output_asm_insn (\"clr1 %1,%0\", xoperands);
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "andsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(and:SI (match_operand:SI 1 "register_operand" "%0,0,r")
+		(match_operand:SI 2 "nonmemory_operand" "r,I,M")))]
+  ""
+  "@
+  and %2,%0
+  and %.,%0
+  andi %2,%1,%0"
+  [(set_attr "length" "2,2,4")
+   (set_attr "cc" "set_znv")])
+
+;; ----------------------------------------------------------------------
+;; OR INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "*v850_set1_1"
+  [(set (match_operand:QI 0 "memory_operand" "=m")
+	(subreg:QI (ior:SI (subreg:SI (match_dup 0) 0)
+			   (match_operand 1 "power_of_two_operand" "")) 0))]
+  ""
+  "set1 %M1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_set1_2"
+  [(set (match_operand:HI 0 "indirect_operand" "=m")
+	(subreg:HI (ior:SI (subreg:SI (match_dup 0) 0)
+			   (match_operand 1 "power_of_two_operand" "")) 0))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (INTVAL (operands[1]));
+
+  if (log2 < 8)
+    return \"set1 %M1,%0\";
+  else
+    {
+      rtx xoperands[2];
+      xoperands[0] = gen_rtx_MEM (QImode,
+				  plus_constant (XEXP (operands[0], 0),
+						 log2 / 8));
+      xoperands[1] = GEN_INT (log2 % 8);
+      output_asm_insn (\"set1 %1,%0\", xoperands);
+    }
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_set1_3"
+  [(set (match_operand:SI 0 "indirect_operand" "=m")
+	(ior:SI (match_dup 0)
+		(match_operand 1 "power_of_two_operand" "")))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (INTVAL (operands[1]));
+
+  if (log2 < 8)
+    return \"set1 %M1,%0\";
+  else
+    {
+      rtx xoperands[2];
+      xoperands[0] = gen_rtx_MEM (QImode,
+				  plus_constant (XEXP (operands[0], 0),
+						 log2 / 8));
+      xoperands[1] = GEN_INT (log2 % 8);
+      output_asm_insn (\"set1 %1,%0\", xoperands);
+    }
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "iorsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(ior:SI (match_operand:SI 1 "register_operand" "%0,0,r")
+		(match_operand:SI 2 "nonmemory_operand" "r,I,M")))]
+  ""
+  "@
+  or %2,%0
+  or %.,%0
+  ori %2,%1,%0"
+  [(set_attr "length" "2,2,4")
+   (set_attr "cc" "set_znv")])
+
+;; ----------------------------------------------------------------------
+;; XOR INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "*v850_not1_1"
+  [(set (match_operand:QI 0 "memory_operand" "=m")
+	(subreg:QI (xor:SI (subreg:SI (match_dup 0) 0)
+			   (match_operand 1 "power_of_two_operand" "")) 0))]
+  ""
+  "not1 %M1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_not1_2"
+  [(set (match_operand:HI 0 "indirect_operand" "=m")
+	(subreg:HI (xor:SI (subreg:SI (match_dup 0) 0)
+			   (match_operand 1 "power_of_two_operand" "")) 0))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (INTVAL (operands[1]));
+
+  if (log2 < 8)
+    return \"not1 %M1,%0\";
+  else
+    {
+      rtx xoperands[2];
+      xoperands[0] = gen_rtx_MEM (QImode,
+				  plus_constant (XEXP (operands[0], 0),
+						 log2 / 8));
+      xoperands[1] = GEN_INT (log2 % 8);
+      output_asm_insn (\"not1 %1,%0\", xoperands);
+    }
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*v850_not1_3"
+  [(set (match_operand:SI 0 "indirect_operand" "=m")
+	(xor:SI (match_dup 0)
+		(match_operand 1 "power_of_two_operand" "")))]
+  ""
+  "*
+{
+  int log2 = exact_log2 (INTVAL (operands[1]));
+
+  if (log2 < 8)
+    return \"not1 %M1,%0\";
+  else
+    {
+      rtx xoperands[2];
+      xoperands[0] = gen_rtx_MEM (QImode,
+				  plus_constant (XEXP (operands[0], 0),
+						 log2 / 8));
+      xoperands[1] = GEN_INT (log2 % 8);
+      output_asm_insn (\"not1 %1,%0\", xoperands);
+    }
+  return \"\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_insn "xorsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(xor:SI (match_operand:SI 1 "register_operand" "%0,0,r")
+		(match_operand:SI 2 "nonmemory_operand" "r,I,M")))]
+  ""
+  "@
+  xor %2,%0
+  xor %.,%0
+  xori %2,%1,%0"
+  [(set_attr "length" "2,2,4")
+   (set_attr "cc" "set_znv")])
+
+;; ----------------------------------------------------------------------
+;; NOT INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn "one_cmplsi2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(not:SI (match_operand:SI 1 "register_operand" "r")))]
+  ""
+  "not %1,%0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_znv")])
+
+;; -----------------------------------------------------------------
+;; BIT FIELDS
+;; -----------------------------------------------------------------
+
+;; ??? Is it worth defining insv and extv for the V850 series?!?
+
+;; An insv pattern would be useful, but does not get used because
+;; store_bit_field never calls insv when storing a constant value into a
+;; single-bit bitfield.
+
+;; extv/extzv patterns would be useful, but do not get used because
+;; optimize_bitfield_compare in fold-const usually converts single
+;; bit extracts into an AND with a mask.
+
+;; -----------------------------------------------------------------
+;; Scc INSTRUCTIONS
+;; -----------------------------------------------------------------
+
+(define_insn "sle"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (le:SI (cc0) (const_int 0)))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0)
+    return 0;
+
+  return \"setf le,%0\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sleu"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (leu:SI (cc0) (const_int 0)))]
+  ""
+  "setf nh,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sge"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (ge:SI (cc0) (const_int 0)))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0)
+    return 0;
+
+  return \"setf ge,%0\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sgeu"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (geu:SI (cc0) (const_int 0)))]
+  ""
+  "setf nl,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "slt"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (lt:SI (cc0) (const_int 0)))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0)
+    return 0;
+
+  return \"setf lt,%0\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sltu"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (ltu:SI (cc0) (const_int 0)))]
+  ""
+  "setf l,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sgt"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (gt:SI (cc0) (const_int 0)))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0)
+    return 0;
+
+  return \"setf gt,%0\";
+}"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sgtu"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (gtu:SI (cc0) (const_int 0)))]
+  ""
+  "setf h,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "seq"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (eq:SI (cc0) (const_int 0)))]
+  ""
+  "setf z,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+(define_insn "sne"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (ne:SI (cc0) (const_int 0)))]
+  ""
+  "setf nz,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none_0hit")])
+
+;; ----------------------------------------------------------------------
+;; CONDITIONAL MOVE INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+;; Instructions using cc0 aren't allowed to have input reloads, so we must
+;; hide the fact that this instruction uses cc0.  We do so by including the
+;; compare instruction inside it.
+
+;; ??? This is very ugly.  The right way to do this is to modify cmpsi so
+;; that it doesn't emit RTL, and then modify the bcc/scc patterns so that
+;; they emit RTL for the compare instruction.  Unfortunately, this requires
+;; lots of changes that will be hard to sanitize.  So for now, cmpsi still
+;; emits RTL, and I get the compare operands here from the previous insn.
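+;;
+;; A rough illustration (the register numbers are invented): for something
+;; like
+;;
+;;	r10 = (r11 > r12) ? r13 : r14;
+;;
+;; this expander digs r11 and r12 out of the cmpsi emitted just before it,
+;; and the "*movsicc_normal" pattern below then produces
+;;
+;;	cmp r12,r11 ; cmov gt,r13,r14,r10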
+
+(define_expand "movsicc"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(match_dup 4) (match_dup 5)])
+	 (match_operand:SI 2 "reg_or_const_operand" "rJ")
+	 (match_operand:SI 3 "reg_or_const_operand" "rI")))]
+  "TARGET_V850E"
+  "
+{
+  rtx insn = get_last_insn_anywhere ();
+  rtx src;
+
+  if (GET_CODE (operands[2]) == CONST_INT
+      && GET_CODE (operands[3]) == CONST_INT)
+    {
+      int o2 = INTVAL (operands[2]);
+      int o3 = INTVAL (operands[3]);
+
+      if (o2 == 1 && o3 == 0)
+	FAIL;   /* setf */
+      if (o3 == 1 && o2 == 0)
+	FAIL;   /* setf */
+      if (o2 == 0 && (o3 < -16 || o3 > 15) && exact_log2 (o3) >= 0)
+	FAIL;   /* setf + shift */
+      if (o3 == 0 && (o2 < -16 || o2 > 15) && exact_log2 (o2) >= 0)
+	FAIL;   /* setf + shift */
+      if (o2 != 0)
+	operands[2] = copy_to_mode_reg (SImode, operands[2]);
+      if (o3 != 0)
+	operands[3] = copy_to_mode_reg (SImode, operands[3]);
+    }
+  else
+    {
+      if (GET_CODE (operands[2]) != REG)
+	operands[2] = copy_to_mode_reg (SImode,operands[2]);
+      if (GET_CODE (operands[3]) != REG)
+	operands[3] = copy_to_mode_reg (SImode, operands[3]);
+    }
+  gcc_assert (GET_CODE (insn) == INSN
+	      && GET_CODE (PATTERN (insn)) == SET
+	      && SET_DEST (PATTERN (insn)) == cc0_rtx);
+    
+  src = SET_SRC (PATTERN (insn));
+
+  switch (GET_CODE (src))
+    {
+    case COMPARE:
+      operands[4] = XEXP (src, 0);
+      operands[5] = XEXP (src, 1);
+      break;
+
+    case REG:
+    case SUBREG:
+      operands[4] = src;
+      operands[5] = const0_rtx;
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+}")
+
+;; ??? Clobbering the condition codes is overkill.
+
+;; ??? We sometimes emit an unnecessary compare instruction because the
+;; condition codes may have already been set by an earlier instruction,
+;; but we have no code here to avoid the compare if it is unnecessary.
+
+(define_insn "*movsicc_normal"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(match_operand:SI 4 "register_operand" "r")
+			  (match_operand:SI 5 "reg_or_int5_operand" "rJ")])
+	 (match_operand:SI 2 "reg_or_int5_operand" "rJ")
+	 (match_operand:SI 3 "reg_or_0_operand" "rI")))]
+  "TARGET_V850E"
+  "cmp %5,%4 ; cmov %c1,%2,%z3,%0"
+  [(set_attr "length" "6")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*movsicc_reversed"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(match_operand:SI 4 "register_operand" "r")
+			  (match_operand:SI 5 "reg_or_int5_operand" "rJ")])
+	 (match_operand:SI 2 "reg_or_0_operand" "rI")
+	 (match_operand:SI 3 "reg_or_int5_operand" "rJ")))]
+  "TARGET_V850E"
+  "cmp %5,%4 ; cmov %C1,%3,%z2,%0"
+  [(set_attr "length" "6")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*movsicc_tst1"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(zero_extract:SI
+			   (match_operand:QI 2 "memory_operand" "m")
+			   (const_int 1)
+			   (match_operand 3 "const_int_operand" "n"))
+			  (const_int 0)])
+	 (match_operand:SI 4 "reg_or_int5_operand" "rJ")
+	 (match_operand:SI 5 "reg_or_0_operand" "rI")))]
+  "TARGET_V850E"
+  "tst1 %3,%2 ; cmov %c1,%4,%z5,%0"
+  [(set_attr "length" "8")
+   (set_attr "cc" "clobber")])
+
+(define_insn "*movsicc_tst1_reversed"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(zero_extract:SI
+			   (match_operand:QI 2 "memory_operand" "m")
+			   (const_int 1)
+			   (match_operand 3 "const_int_operand" "n"))
+			  (const_int 0)])
+	 (match_operand:SI 4 "reg_or_0_operand" "rI")
+	 (match_operand:SI 5 "reg_or_int5_operand" "rJ")))]
+  "TARGET_V850E"
+  "tst1 %3,%2 ; cmov %C1,%5,%z4,%0"
+  [(set_attr "length" "8")
+   (set_attr "cc" "clobber")])
+
+;; Matching for sasf requires combining 4 instructions, so we provide a
+;; dummy pattern to match the first 3, which will always be turned into the
+;; second pattern by subsequent combining.  As above, we must include the
+;; comparison to avoid input reloads in an insn using cc0.
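+;;
+;; A rough illustration (the register numbers are invented): for an idiom
+;; like
+;;
+;;	r10 = (r10 << 1) | (r11 < r12);
+;;
+;; the "*sasf_2" pattern below produces
+;;
+;;	cmp r12,r11 ; sasf lt,r10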
+
+(define_insn "*sasf_1"
+  [(set (match_operand:SI 0 "register_operand" "")
+	(ior:SI (match_operator 1 "comparison_operator" [(cc0) (const_int 0)])
+		(ashift:SI (match_operand:SI 2 "register_operand" "")
+			   (const_int 1))))]
+  "TARGET_V850E"
+  "* gcc_unreachable ();")
+
+(define_insn "*sasf_2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(ior:SI
+	 (match_operator 1 "comparison_operator"
+			 [(match_operand:SI 3 "register_operand" "r")
+			  (match_operand:SI 4 "reg_or_int5_operand" "rJ")])
+	 (ashift:SI (match_operand:SI 2 "register_operand" "0")
+		    (const_int 1))))]
+  "TARGET_V850E"
+  "cmp %4,%3 ; sasf %c1,%0"
+  [(set_attr "length" "6")
+   (set_attr "cc" "clobber")])
+
+(define_split
+  [(set (match_operand:SI 0 "register_operand" "")
+	(if_then_else:SI
+	 (match_operator 1 "comparison_operator"
+			 [(match_operand:SI 4 "register_operand" "")
+			  (match_operand:SI 5 "reg_or_int5_operand" "")])
+	 (match_operand:SI 2 "const_int_operand" "")
+	 (match_operand:SI 3 "const_int_operand" "")))]
+  "TARGET_V850E
+   && ((INTVAL (operands[2]) ^ INTVAL (operands[3])) == 1)
+   && ((INTVAL (operands[2]) + INTVAL (operands[3])) != 1)
+   && (GET_CODE (operands[5]) == CONST_INT
+      || REGNO (operands[0]) != REGNO (operands[5]))
+   && REGNO (operands[0]) != REGNO (operands[4])"
+  [(set (match_dup 0) (match_dup 6))
+   (set (match_dup 0)
+	(ior:SI (match_op_dup 7 [(match_dup 4) (match_dup 5)])
+		(ashift:SI (match_dup 0) (const_int 1))))]
+  "
+{
+  operands[6] = GEN_INT (INTVAL (operands[2]) >> 1);
+  if (INTVAL (operands[2]) & 0x1)
+    operands[7] = operands[1];
+  else
+    operands[7] = gen_rtx_fmt_ee (reverse_condition (GET_CODE (operands[1])),
+				  GET_MODE (operands[1]),
+				  XEXP (operands[1], 0), XEXP (operands[1], 1));
+}")
+;; ---------------------------------------------------------------------
+;; BYTE SWAP INSTRUCTIONS
+;; ---------------------------------------------------------------------
+
+(define_expand "rotlhi3"
+  [(set (match_operand:HI 0 "register_operand" "")
+	(rotate:HI (match_operand:HI 1 "register_operand" "")
+		   (match_operand:HI 2 "const_int_operand" "")))]
+  "TARGET_V850E"
+  "
+{
+  if (INTVAL (operands[2]) != 8)
+    FAIL;
+}")
+
+(define_insn "*rotlhi3_8"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(rotate:HI (match_operand:HI 1 "register_operand" "r")
+		   (const_int 8)))]
+  "TARGET_V850E"
+  "bsh %1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+(define_expand "rotlsi3"
+  [(set (match_operand:SI 0 "register_operand" "")
+	(rotate:SI (match_operand:SI 1 "register_operand" "")
+		   (match_operand:SI 2 "const_int_operand" "")))]
+  "TARGET_V850E"
+  "
+{
+  if (INTVAL (operands[2]) != 16)
+    FAIL;
+}")
+
+(define_insn "*rotlsi3_16"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(rotate:SI (match_operand:SI 1 "register_operand" "r")
+		   (const_int 16)))]
+  "TARGET_V850E"
+  "hsw %1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+;; ----------------------------------------------------------------------
+;; JUMP INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+;; Conditional jump instructions
+
+(define_expand "ble"
+  [(set (pc)
+	(if_then_else (le (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bleu"
+  [(set (pc)
+	(if_then_else (leu (cc0)
+			   (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bge"
+  [(set (pc)
+	(if_then_else (ge (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bgeu"
+  [(set (pc)
+	(if_then_else (geu (cc0)
+			   (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "blt"
+  [(set (pc)
+	(if_then_else (lt (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bltu"
+  [(set (pc)
+	(if_then_else (ltu (cc0)
+			   (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bgt"
+  [(set (pc)
+	(if_then_else (gt (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bgtu"
+  [(set (pc)
+	(if_then_else (gtu (cc0)
+			   (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "beq"
+  [(set (pc)
+	(if_then_else (eq (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_expand "bne"
+  [(set (pc)
+	(if_then_else (ne (cc0)
+			  (const_int 0))
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "")
+
+(define_insn "*branch_normal"
+  [(set (pc)
+	(if_then_else (match_operator 1 "comparison_operator"
+				      [(cc0) (const_int 0)])
+		      (label_ref (match_operand 0 "" ""))
+		      (pc)))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0
+      && (GET_CODE (operands[1]) == GT
+	  || GET_CODE (operands[1]) == GE
+	  || GET_CODE (operands[1]) == LE
+	  || GET_CODE (operands[1]) == LT))
+    return 0;
+
+  if (get_attr_length (insn) == 2)
+    return \"b%b1 %l0\";
+  else
+    return \"b%B1 .+6 ; jr %l0\";
+}"
+ [(set (attr "length")
+    (if_then_else (lt (abs (minus (match_dup 0) (pc)))
+		      (const_int 256))
+		  (const_int 2)
+		  (const_int 6)))
+  (set_attr "cc" "none")])
+
+(define_insn "*branch_invert"
+  [(set (pc)
+	(if_then_else (match_operator 1 "comparison_operator"
+				      [(cc0) (const_int 0)])
+		      (pc)
+		      (label_ref (match_operand 0 "" ""))))]
+  ""
+  "*
+{
+  if ((cc_status.flags & CC_OVERFLOW_UNUSABLE) != 0
+      && (GET_CODE (operands[1]) == GT
+	  || GET_CODE (operands[1]) == GE
+	  || GET_CODE (operands[1]) == LE
+	  || GET_CODE (operands[1]) == LT))
+    return 0;
+  if (get_attr_length (insn) == 2)
+    return \"b%B1 %l0\";
+  else
+    return \"b%b1 .+6 ; jr %l0\";
+}"
+ [(set (attr "length")
+    (if_then_else (lt (abs (minus (match_dup 0) (pc)))
+		      (const_int 256))
+		  (const_int 2)
+		  (const_int 6)))
+  (set_attr "cc" "none")])
+
+;; Unconditional and other jump instructions.
+
+(define_insn "jump"
+  [(set (pc)
+	(label_ref (match_operand 0 "" "")))]
+  ""
+  "*
+{
+  if (get_attr_length (insn) == 2)
+    return \"br %0\";
+  else
+    return \"jr %0\";
+}"
+ [(set (attr "length")
+    (if_then_else (lt (abs (minus (match_dup 0) (pc)))
+		      (const_int 256))
+		  (const_int 2)
+		  (const_int 4)))
+  (set_attr "cc" "none")])
+
+(define_insn "indirect_jump"
+  [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
+  ""
+  "jmp %0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "tablejump"
+  [(set (pc) (match_operand:SI 0 "register_operand" "r"))
+   (use (label_ref (match_operand 1 "" "")))]
+  ""
+  "jmp  %0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "switch"
+  [(set (pc)
+	(plus:SI
+	 (sign_extend:SI
+	  (mem:HI
+	   (plus:SI (ashift:SI (match_operand:SI 0 "register_operand" "r")
+			       (const_int 1))
+		    (label_ref (match_operand 1 "" "")))))
+	 (label_ref (match_dup 1))))]
+  "TARGET_V850E"
+  "switch %0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_expand "casesi"
+  [(match_operand:SI 0 "register_operand" "")
+   (match_operand:SI 1 "register_operand" "")
+   (match_operand:SI 2 "register_operand" "")
+   (match_operand 3 "" "") (match_operand 4 "" "")]
+  ""
+  "
+{
+  rtx reg = gen_reg_rtx (SImode);
+  rtx tableaddress = gen_reg_rtx (SImode);
+  rtx mem;
+
+  /* Subtract the lower bound from the index.  */
+  emit_insn (gen_subsi3 (reg, operands[0], operands[1]));
+  /* Compare the result against the number of table entries.  */
+  emit_insn (gen_cmpsi (reg, operands[2]));
+  /* Branch to the default label if out of range of the table.  */
+  emit_jump_insn (gen_bgtu (operands[4]));
+
+  /* Shift index for the table array access.  */
+  emit_insn (gen_ashlsi3 (reg, reg, GEN_INT (TARGET_BIG_SWITCH ? 2 : 1)));
+  /* Load the table address into a pseudo.  */
+  emit_insn (gen_movsi (tableaddress,
+			gen_rtx_LABEL_REF (Pmode, operands[3])));
+  /* Add the table address to the index.  */
+  emit_insn (gen_addsi3 (reg, reg, tableaddress));
+  /* Load the table entry.  */
+  mem = gen_const_mem (CASE_VECTOR_MODE, reg);
+  if (! TARGET_BIG_SWITCH)
+    {
+      rtx reg2 = gen_reg_rtx (HImode);
+      emit_insn (gen_movhi (reg2, mem));
+      emit_insn (gen_extendhisi2 (reg, reg2));
+    }
+  else
+    emit_insn (gen_movsi (reg, mem));
+  /* Add the table address.  */
+  emit_insn (gen_addsi3 (reg, reg, tableaddress));
+  /* Branch to the switch label.  */
+  emit_jump_insn (gen_tablejump (reg, operands[3]));
+  DONE;
+}")
+
+;; Call subroutine with no return value.
+
+(define_expand "call"
+  [(call (match_operand:QI 0 "general_operand" "")
+	 (match_operand:SI 1 "general_operand" ""))]
+  ""
+  "
+{
+  if (! call_address_operand (XEXP (operands[0], 0), QImode)
+      || TARGET_LONG_CALLS)
+    XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
+  if (TARGET_LONG_CALLS)
+    emit_call_insn (gen_call_internal_long (XEXP (operands[0], 0), operands[1]));
+  else
+    emit_call_insn (gen_call_internal_short (XEXP (operands[0], 0), operands[1]));
+  
+  DONE;
+}")
+
+(define_insn "call_internal_short"
+  [(call (mem:QI (match_operand:SI 0 "call_address_operand" "S,r"))
+	 (match_operand:SI 1 "general_operand" "g,g"))
+   (clobber (reg:SI 31))]
+  "! TARGET_LONG_CALLS"
+  "@
+  jarl %0,r31
+  jarl .+4,r31 ; add 4,r31 ; jmp %0"
+  [(set_attr "length" "4,8")]
+)
+
+(define_insn "call_internal_long"
+  [(call (mem:QI (match_operand:SI 0 "call_address_operand" "S,r"))
+	 (match_operand:SI 1 "general_operand" "g,g"))
+   (clobber (reg:SI 31))]
+  "TARGET_LONG_CALLS"
+  "*
+  {
+  if (which_alternative == 0)
+    {
+      if (GET_CODE (operands[0]) == REG)
+        return \"jarl %0,r31\";
+      else
+        return \"movhi hi(%0), r0, r11 ; movea lo(%0), r11, r11 ; jarl .+4,r31 ; add 4, r31 ; jmp r11\";
+    }
+  else
+    return \"jarl .+4,r31 ; add 4,r31 ; jmp %0\";
+  }"
+  [(set_attr "length" "16,8")]
+)
+
+;; Call subroutine, returning value in operand 0
+;; (which must be a hard register).
+
+(define_expand "call_value"
+  [(set (match_operand 0 "" "")
+	(call (match_operand:QI 1 "general_operand" "")
+	      (match_operand:SI 2 "general_operand" "")))]
+  ""
+  "
+{
+  if (! call_address_operand (XEXP (operands[1], 0), QImode)
+      || TARGET_LONG_CALLS)
+    XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
+  if (TARGET_LONG_CALLS)
+    emit_call_insn (gen_call_value_internal_long (operands[0],
+	 				          XEXP (operands[1], 0),
+					          operands[2]));
+  else
+    emit_call_insn (gen_call_value_internal_short (operands[0],
+	 				           XEXP (operands[1], 0),
+					           operands[2]));
+  DONE;
+}")
+
+(define_insn "call_value_internal_short"
+  [(set (match_operand 0 "" "=r,r")
+	(call (mem:QI (match_operand:SI 1 "call_address_operand" "S,r"))
+	      (match_operand:SI 2 "general_operand" "g,g")))
+   (clobber (reg:SI 31))]
+  "! TARGET_LONG_CALLS"
+  "@
+  jarl %1,r31
+  jarl .+4,r31 ; add 4,r31 ; jmp %1"
+  [(set_attr "length" "4,8")]
+)
+
+(define_insn "call_value_internal_long"
+  [(set (match_operand 0 "" "=r,r")
+	(call (mem:QI (match_operand:SI 1 "call_address_operand" "S,r"))
+	      (match_operand:SI 2 "general_operand" "g,g")))
+   (clobber (reg:SI 31))]
+  "TARGET_LONG_CALLS"
+  "*
+  {
+  if (which_alternative == 0)
+    {
+      if (GET_CODE (operands[1]) == REG)
+        return \"jarl %1, r31\";
+      else
+      /* Reload can generate this pattern....  */
+        return \"movhi hi(%1), r0, r11 ; movea lo(%1), r11, r11 ; jarl .+4, r31 ; add 4, r31 ; jmp r11\";
+    }
+  else
+    return \"jarl .+4, r31 ; add 4, r31 ; jmp %1\";
+  }"
+  [(set_attr "length" "16,8")]
+)
+
+(define_insn "nop"
+  [(const_int 0)]
+  ""
+  "nop"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+;; ----------------------------------------------------------------------
+;; EXTEND INSTRUCTIONS
+;; ----------------------------------------------------------------------
+
+(define_insn ""
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
+	(zero_extend:SI
+	 (match_operand:HI 1 "nonimmediate_operand" "0,r,T,m")))]
+  "TARGET_V850E"
+  "@
+   zxh %0
+   andi 65535,%1,%0
+   sld.hu %1,%0
+   ld.hu %1,%0"
+  [(set_attr "length" "2,4,2,4")
+   (set_attr "cc" "none_0hit,set_znv,none_0hit,none_0hit")])
+
+(define_insn "zero_extendhisi2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(zero_extend:SI
+	 (match_operand:HI 1 "register_operand" "r")))]
+  ""
+  "andi 65535,%1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_znv")])
+
+(define_insn ""
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
+	(zero_extend:SI
+	 (match_operand:QI 1 "nonimmediate_operand" "0,r,T,m")))]
+  "TARGET_V850E"
+  "@
+   zxb %0
+   andi 255,%1,%0
+   sld.bu %1,%0
+   ld.bu %1,%0"
+  [(set_attr "length" "2,4,2,4")
+   (set_attr "cc" "none_0hit,set_znv,none_0hit,none_0hit")])
+
+(define_insn "zero_extendqisi2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(zero_extend:SI
+	 (match_operand:QI 1 "register_operand" "r")))]
+  ""
+  "andi 255,%1,%0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_znv")])
+
+;;- sign extension instructions
+
+;; ??? The extendhisi2 pattern should not emit shifts for v850e?
+
+(define_insn "*extendhisi_insn"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "0,Q,m")))]
+  "TARGET_V850E"
+  "@
+   sxh %0
+   sld.h %1,%0
+   ld.h %1,%0"
+  [(set_attr "length" "2,2,4")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit")])
+
+;; ??? This is missing a sign extend from memory pattern to match the ld.h
+;; instruction.
+
+(define_expand "extendhisi2"
+  [(set (match_dup 2)
+        (ashift:SI (match_operand:HI 1 "register_operand" "")
+                   (const_int 16)))
+   (set (match_operand:SI 0 "register_operand" "")
+       (ashiftrt:SI (match_dup 2)
+                     (const_int 16)))]
+  ""
+  "
+{
+  operands[1] = gen_lowpart (SImode, operands[1]);
+  operands[2] = gen_reg_rtx (SImode);
+}")
+
+;; ??? The extendqisi2 pattern should not emit shifts for v850e?
+
+(define_insn "*extendqisi_insn"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "0,Q,m")))]
+  "TARGET_V850E"
+  "@
+   sxb %0
+   sld.b %1,%0
+   ld.b %1,%0"
+  [(set_attr "length" "2,2,4")
+   (set_attr "cc" "none_0hit,none_0hit,none_0hit")])
+
+;; ??? This is missing a sign extend from memory pattern to match the ld.b
+;; instruction.
+
+(define_expand "extendqisi2"
+  [(set (match_dup 2)
+        (ashift:SI (match_operand:QI 1 "register_operand" "")
+                   (const_int 24)))
+   (set (match_operand:SI 0 "register_operand" "")
+        (ashiftrt:SI (match_dup 2)
+                     (const_int 24)))]
+  ""
+  "
+{
+  operands[1] = gen_lowpart (SImode, operands[1]);
+  operands[2] = gen_reg_rtx (SImode);
+}")
+
+;; ----------------------------------------------------------------------
+;; SHIFTS
+;; ----------------------------------------------------------------------
+
+(define_insn "ashlsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+	(ashift:SI
+	 (match_operand:SI 1 "register_operand" "0,0")
+	 (match_operand:SI 2 "nonmemory_operand" "r,N")))]
+  ""
+  "@
+  shl %2,%0
+  shl %2,%0"
+  [(set_attr "length" "4,2")
+   (set_attr "cc" "set_znv")])
+
+(define_insn "lshrsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+	(lshiftrt:SI
+	 (match_operand:SI 1 "register_operand" "0,0")
+	 (match_operand:SI 2 "nonmemory_operand" "r,N")))]
+  ""
+  "@
+  shr %2,%0
+  shr %2,%0"
+  [(set_attr "length" "4,2")
+   (set_attr "cc" "set_znv")])
+
+(define_insn "ashrsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+	(ashiftrt:SI
+	 (match_operand:SI 1 "register_operand" "0,0")
+	 (match_operand:SI 2 "nonmemory_operand" "r,N")))]
+  ""
+  "@
+  sar %2,%0
+  sar %2,%0"
+  [(set_attr "length" "4,2")
+   (set_attr "cc" "set_znv")])
+
+;; ----------------------------------------------------------------------
+;; PROLOGUE/EPILOGUE
+;; ----------------------------------------------------------------------
+(define_expand "prologue"
+  [(const_int 0)]
+  ""
+  "expand_prologue (); DONE;")
+
+(define_expand "epilogue"
+  [(return)]
+  ""
+  "
+{
+  /* Try to use the trivial return first.  Else use the
+     full epilogue.  */
+  if (0)
+    emit_jump_insn (gen_return ());
+  else
+    expand_epilogue ();
+  DONE;
+}")
+
+(define_insn "return"
+  [(return)]
+  "reload_completed && compute_frame_size (get_frame_size (), (long *)0) == 0"
+  "jmp [r31]"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "return_internal"
+  [(return)
+   (use (reg:SI 31))]
+  ""
+  "jmp [r31]"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+
+
+;; ----------------------------------------------------------------------
+;; HELPER INSTRUCTIONS for saving the prologue and epilogue registers
+;; ----------------------------------------------------------------------
+
+;; This pattern will match a stack adjust RTX followed by any number of push
+;; RTXs.  These RTXs will then be turned into a suitable call to a worker
+;; function.
+
+;;
+;; Actually, convert the RTXs into a PREPARE instruction.
+;;
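+;; As an illustrative RTL sketch only (the registers saved and the offsets
+;; are whatever expand_prologue chose), such a parallel might look like
+;;
+;;	(parallel [(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int -16)))
+;;		   (set (mem:SI (plus:SI (reg:SI 3) (const_int -8))) (reg:SI 29))
+;;		   (set (mem:SI (plus:SI (reg:SI 3) (const_int -4))) (reg:SI 31))])
+;;
+;; which construct_prepare_instruction then renders as a single PREPARE.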
+(define_insn ""
+ [(match_parallel 0 "pattern_is_ok_for_prepare"
+   [(set (reg:SI 3)
+	 (plus:SI (reg:SI 3) (match_operand:SI 1 "immediate_operand" "i")))
+    (set (mem:SI (plus:SI (reg:SI 3)
+			  (match_operand:SI 2 "immediate_operand" "i")))
+	 (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))])]
+ "TARGET_PROLOG_FUNCTION && TARGET_V850E"
+ "* return construct_prepare_instruction (operands[0]);
+ "
+ [(set_attr "length" "4")
+  (set_attr "cc"     "none")])
+
+(define_insn ""
+ [(match_parallel 0 "pattern_is_ok_for_prologue"
+   [(set (reg:SI 3)
+	 (plus:SI (reg:SI 3) (match_operand:SI 1 "immediate_operand" "i")))
+    (set (mem:SI (plus:SI (reg:SI 3)
+			   (match_operand:SI 2 "immediate_operand" "i")))
+	 (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))])]
+ "TARGET_PROLOG_FUNCTION && TARGET_V850"
+ "* return construct_save_jarl (operands[0]);
+ "
+ [(set (attr "length") (if_then_else (eq_attr "long_calls" "yes")
+				     (const_string "16")
+				     (const_string "4")))
+  (set_attr "cc"     "clobber")])
+
+;;
+;; Actually, turn the RTXs into a DISPOSE instruction.
+;;
+(define_insn ""
+ [(match_parallel 0 "pattern_is_ok_for_dispose"
+   [(return)
+    (set (reg:SI 3)
+	 (plus:SI (reg:SI 3) (match_operand:SI 1 "immediate_operand" "i")))
+    (set (match_operand:SI 2 "register_is_ok_for_epilogue" "=r")
+	 (mem:SI (plus:SI (reg:SI 3)
+			  (match_operand:SI 3 "immediate_operand" "i"))))])]
+ "TARGET_PROLOG_FUNCTION && TARGET_V850E"
+ "* return construct_dispose_instruction (operands[0]);
+ "
+ [(set_attr "length" "4")
+  (set_attr "cc"     "none")])
+
+;; This pattern will match a return RTX followed by any number of pop RTXs
+;; and possibly a stack adjustment as well.  These RTXs will be turned into
+;; a suitable call to a worker function.
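+;;
+;; As an illustrative RTL sketch only (registers and offsets are whatever
+;; expand_epilogue chose), such a parallel might look like
+;;
+;;	(parallel [(return)
+;;		   (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 16)))
+;;		   (set (reg:SI 31) (mem:SI (plus:SI (reg:SI 3) (const_int 12))))
+;;		   (set (reg:SI 29) (mem:SI (plus:SI (reg:SI 3) (const_int 8))))])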
+
+(define_insn ""
+[(match_parallel 0 "pattern_is_ok_for_epilogue"
+   [(return)
+    (set (reg:SI 3)
+	 (plus:SI (reg:SI 3) (match_operand:SI 1 "immediate_operand" "i")))
+    (set (match_operand:SI 2 "register_is_ok_for_epilogue" "=r")
+	 (mem:SI (plus:SI (reg:SI 3)
+			  (match_operand:SI 3 "immediate_operand" "i"))))])]
+ "TARGET_PROLOG_FUNCTION && TARGET_V850"
+ "* return construct_restore_jr (operands[0]);
+ "
+ [(set (attr "length") (if_then_else (eq_attr "long_calls" "yes")
+				     (const_string "12")
+				     (const_string "4")))
+  (set_attr "cc"     "clobber")])
+
+;; Initialize an interrupt function.  Do not depend on TARGET_PROLOG_FUNCTION.
+(define_insn "callt_save_interrupt"
+  [(unspec_volatile [(const_int 0)] 2)]
+    "TARGET_V850E && !TARGET_DISABLE_CALLT"
+    ;; The CALLT instruction stores the address of the instruction following
+    ;; the CALLT into the CTPC register, without preserving CTPC's previous
+    ;; value.  So if the interrupt handler or its caller could possibly
+    ;; execute the CALLT insn, save_interrupt MUST NOT be called via CALLT.
+    "*
+{
+  output_asm_insn (\"addi -24,   sp, sp\", operands);
+  output_asm_insn (\"st.w r10,   12[sp]\", operands);
+  output_asm_insn (\"stsr ctpc,  r10\",    operands);
+  output_asm_insn (\"st.w r10,   16[sp]\", operands);
+  output_asm_insn (\"stsr ctpsw, r10\",    operands);
+  output_asm_insn (\"st.w r10,   20[sp]\", operands);
+  output_asm_insn (\"callt ctoff(__callt_save_interrupt)\", operands);
+  return \"\";
+}"
+   [(set_attr "length" "26")
+    (set_attr "cc" "none")])
+
+(define_insn "callt_return_interrupt"
+  [(unspec_volatile [(const_int 0)] 3)]
+  "TARGET_V850E && !TARGET_DISABLE_CALLT"
+  "callt ctoff(__callt_return_interrupt)"
+  [(set_attr "length" "2")
+   (set_attr "cc" "clobber")])
+
+(define_insn "save_interrupt"
+  [(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int -16)))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int -16))) (reg:SI 30))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int -12))) (reg:SI 4))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int  -8))) (reg:SI 1))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int  -4))) (reg:SI 10))]
+  ""
+  "*
+{
+  if (TARGET_PROLOG_FUNCTION && !TARGET_LONG_CALLS)
+    return \"add -16,sp\;st.w r10,12[sp]\;jarl __save_interrupt,r10\";
+  else
+    {
+      output_asm_insn (\"add   -16, sp\", operands);
+      output_asm_insn (\"st.w  r10, 12[sp]\", operands);
+      output_asm_insn (\"st.w  ep, 0[sp]\", operands);
+      output_asm_insn (\"st.w  gp, 4[sp]\", operands);
+      output_asm_insn (\"st.w  r1, 8[sp]\", operands);
+      output_asm_insn (\"movhi hi(__ep), r0, ep\", operands);
+      output_asm_insn (\"movea lo(__ep), ep, ep\", operands);
+      output_asm_insn (\"movhi hi(__gp), r0, gp\", operands);
+      output_asm_insn (\"movea lo(__gp), gp, gp\", operands);
+      return \"\";
+    }
+}"
+  [(set (attr "length")
+        (if_then_else (ne (symbol_ref "TARGET_LONG_CALLS") (const_int 0))
+                       (const_int 10)
+                       (const_int 34)))
+   (set_attr "cc" "clobber")])
+  
+;; Restore r1, r4, r10, and return from the interrupt
+(define_insn "return_interrupt"
+  [(return)
+   (set (reg:SI 3)  (plus:SI (reg:SI 3) (const_int 16)))
+   (set (reg:SI 10) (mem:SI (plus:SI (reg:SI 3) (const_int 12))))
+   (set (reg:SI 1)  (mem:SI (plus:SI (reg:SI 3) (const_int  8))))
+   (set (reg:SI 4)  (mem:SI (plus:SI (reg:SI 3) (const_int  4))))
+   (set (reg:SI 30) (mem:SI (reg:SI 3)))]
+  ""
+  "*
+{
+  if (TARGET_PROLOG_FUNCTION && !TARGET_LONG_CALLS)
+    return \"jr __return_interrupt\";
+  else 
+    {
+      output_asm_insn (\"ld.w 0[sp],  ep\",   operands);
+      output_asm_insn (\"ld.w 4[sp],  gp\",   operands);
+      output_asm_insn (\"ld.w 8[sp],  r1\",   operands);
+      output_asm_insn (\"ld.w 12[sp], r10\", operands);
+      output_asm_insn (\"addi 16, sp, sp\",   operands);
+      output_asm_insn (\"reti\",            operands);
+      return \"\";
+    }
+}"
+  [(set (attr "length")
+        (if_then_else (ne (symbol_ref "TARGET_LONG_CALLS") (const_int 0))
+                       (const_int 4)
+                       (const_int 24)))
+   (set_attr "cc" "clobber")])
+
+;; Save all registers except for the registers saved in save_interrupt when
+;; an interrupt function makes a call.
+;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
+;; all of memory.  This blocks insns from being moved across this point.
+;; This is needed because the rest of the compiler is not ready to handle
+;; insns this complicated.
+
+(define_insn "callt_save_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 0)]
+  "TARGET_V850E && !TARGET_DISABLE_CALLT"
+  "callt ctoff(__callt_save_all_interrupt)"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "save_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 0)]
+  ""
+  "*
+{
+  if (TARGET_PROLOG_FUNCTION && !TARGET_LONG_CALLS)
+    return \"jarl __save_all_interrupt,r10\";
+
+  output_asm_insn (\"addi -120, sp, sp\", operands);
+
+  if (TARGET_EP)
+    {
+      output_asm_insn (\"mov ep, r1\", operands);
+      output_asm_insn (\"mov sp, ep\", operands);
+      output_asm_insn (\"sst.w r31, 116[ep]\", operands);
+      output_asm_insn (\"sst.w r2,  112[ep]\", operands);
+      output_asm_insn (\"sst.w gp,  108[ep]\", operands);
+      output_asm_insn (\"sst.w r6,  104[ep]\", operands);
+      output_asm_insn (\"sst.w r7,  100[ep]\", operands);
+      output_asm_insn (\"sst.w r8,   96[ep]\", operands);
+      output_asm_insn (\"sst.w r9,   92[ep]\", operands);
+      output_asm_insn (\"sst.w r11,  88[ep]\", operands);
+      output_asm_insn (\"sst.w r12,  84[ep]\", operands);
+      output_asm_insn (\"sst.w r13,  80[ep]\", operands);
+      output_asm_insn (\"sst.w r14,  76[ep]\", operands);
+      output_asm_insn (\"sst.w r15,  72[ep]\", operands);
+      output_asm_insn (\"sst.w r16,  68[ep]\", operands);
+      output_asm_insn (\"sst.w r17,  64[ep]\", operands);
+      output_asm_insn (\"sst.w r18,  60[ep]\", operands);
+      output_asm_insn (\"sst.w r19,  56[ep]\", operands);
+      output_asm_insn (\"sst.w r20,  52[ep]\", operands);
+      output_asm_insn (\"sst.w r21,  48[ep]\", operands);
+      output_asm_insn (\"sst.w r22,  44[ep]\", operands);
+      output_asm_insn (\"sst.w r23,  40[ep]\", operands);
+      output_asm_insn (\"sst.w r24,  36[ep]\", operands);
+      output_asm_insn (\"sst.w r25,  32[ep]\", operands);
+      output_asm_insn (\"sst.w r26,  28[ep]\", operands);
+      output_asm_insn (\"sst.w r27,  24[ep]\", operands);
+      output_asm_insn (\"sst.w r28,  20[ep]\", operands);
+      output_asm_insn (\"sst.w r29,  16[ep]\", operands);
+      output_asm_insn (\"mov   r1,   ep\", operands);
+    }
+  else
+    {
+      output_asm_insn (\"st.w r31, 116[sp]\", operands);
+      output_asm_insn (\"st.w r2,  112[sp]\", operands);
+      output_asm_insn (\"st.w gp,  108[sp]\", operands);
+      output_asm_insn (\"st.w r6,  104[sp]\", operands);
+      output_asm_insn (\"st.w r7,  100[sp]\", operands);
+      output_asm_insn (\"st.w r8,   96[sp]\", operands);
+      output_asm_insn (\"st.w r9,   92[sp]\", operands);
+      output_asm_insn (\"st.w r11,  88[sp]\", operands);
+      output_asm_insn (\"st.w r12,  84[sp]\", operands);
+      output_asm_insn (\"st.w r13,  80[sp]\", operands);
+      output_asm_insn (\"st.w r14,  76[sp]\", operands);
+      output_asm_insn (\"st.w r15,  72[sp]\", operands);
+      output_asm_insn (\"st.w r16,  68[sp]\", operands);
+      output_asm_insn (\"st.w r17,  64[sp]\", operands);
+      output_asm_insn (\"st.w r18,  60[sp]\", operands);
+      output_asm_insn (\"st.w r19,  56[sp]\", operands);
+      output_asm_insn (\"st.w r20,  52[sp]\", operands);
+      output_asm_insn (\"st.w r21,  48[sp]\", operands);
+      output_asm_insn (\"st.w r22,  44[sp]\", operands);
+      output_asm_insn (\"st.w r23,  40[sp]\", operands);
+      output_asm_insn (\"st.w r24,  36[sp]\", operands);
+      output_asm_insn (\"st.w r25,  32[sp]\", operands);
+      output_asm_insn (\"st.w r26,  28[sp]\", operands);
+      output_asm_insn (\"st.w r27,  24[sp]\", operands);
+      output_asm_insn (\"st.w r28,  20[sp]\", operands);
+      output_asm_insn (\"st.w r29,  16[sp]\", operands);
+    }
+    
+  return \"\";
+}"
+  [(set (attr "length")
+        (if_then_else (ne (symbol_ref "TARGET_LONG_CALLS") (const_int 0))
+                       (const_int 4)
+                       (const_int 62)
+	))
+   (set_attr "cc" "clobber")])
+
+(define_insn "_save_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 0)]
+  "TARGET_V850 && ! TARGET_LONG_CALLS"
+  "jarl __save_all_interrupt,r10"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+;; Restore all registers saved when an interrupt function makes a call.
+;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
+;; all of memory.  This blocks insns from being moved across this point.
+;; This is needed because the rest of the compiler is not ready to handle
+;; insns this complicated.
+
+(define_insn "callt_restore_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 1)]
+  "TARGET_V850E && !TARGET_DISABLE_CALLT"
+  "callt ctoff(__callt_restore_all_interrupt)"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "restore_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 1)]
+  ""
+  "*
+{
+  if (TARGET_PROLOG_FUNCTION && !TARGET_LONG_CALLS)
+    return \"jarl __restore_all_interrupt,r10\";
+
+  if (TARGET_EP)
+    {
+      output_asm_insn (\"mov   ep,      r1\", operands);
+      output_asm_insn (\"mov   sp,      ep\", operands);
+      output_asm_insn (\"sld.w 116[ep], r31\", operands);
+      output_asm_insn (\"sld.w 112[ep], r2\", operands);
+      output_asm_insn (\"sld.w 108[ep], gp\", operands);
+      output_asm_insn (\"sld.w 104[ep], r6\", operands);
+      output_asm_insn (\"sld.w 100[ep], r7\", operands);
+      output_asm_insn (\"sld.w 96[ep],  r8\", operands);
+      output_asm_insn (\"sld.w 92[ep],  r9\", operands);
+      output_asm_insn (\"sld.w 88[ep],  r11\", operands);
+      output_asm_insn (\"sld.w 84[ep],  r12\", operands);
+      output_asm_insn (\"sld.w 80[ep],  r13\", operands);
+      output_asm_insn (\"sld.w 76[ep],  r14\", operands);
+      output_asm_insn (\"sld.w 72[ep],  r15\", operands);
+      output_asm_insn (\"sld.w 68[ep],  r16\", operands);
+      output_asm_insn (\"sld.w 64[ep],  r17\", operands);
+      output_asm_insn (\"sld.w 60[ep],  r18\", operands);
+      output_asm_insn (\"sld.w 56[ep],  r19\", operands);
+      output_asm_insn (\"sld.w 52[ep],  r20\", operands);
+      output_asm_insn (\"sld.w 48[ep],  r21\", operands);
+      output_asm_insn (\"sld.w 44[ep],  r22\", operands);
+      output_asm_insn (\"sld.w 40[ep],  r23\", operands);
+      output_asm_insn (\"sld.w 36[ep],  r24\", operands);
+      output_asm_insn (\"sld.w 32[ep],  r25\", operands);
+      output_asm_insn (\"sld.w 28[ep],  r26\", operands);
+      output_asm_insn (\"sld.w 24[ep],  r27\", operands);
+      output_asm_insn (\"sld.w 20[ep],  r28\", operands);
+      output_asm_insn (\"sld.w 16[ep],  r29\", operands);
+      output_asm_insn (\"mov   r1,      ep\", operands);
+    }
+  else
+    {
+      output_asm_insn (\"ld.w 116[sp], r31\", operands);
+      output_asm_insn (\"ld.w 112[sp], r2\", operands);
+      output_asm_insn (\"ld.w 108[sp], gp\", operands);
+      output_asm_insn (\"ld.w 104[sp], r6\", operands);
+      output_asm_insn (\"ld.w 100[sp], r7\", operands);
+      output_asm_insn (\"ld.w 96[sp],  r8\", operands);
+      output_asm_insn (\"ld.w 92[sp],  r9\", operands);
+      output_asm_insn (\"ld.w 88[sp],  r11\", operands);
+      output_asm_insn (\"ld.w 84[sp],  r12\", operands);
+      output_asm_insn (\"ld.w 80[sp],  r13\", operands);
+      output_asm_insn (\"ld.w 76[sp],  r14\", operands);
+      output_asm_insn (\"ld.w 72[sp],  r15\", operands);
+      output_asm_insn (\"ld.w 68[sp],  r16\", operands);
+      output_asm_insn (\"ld.w 64[sp],  r17\", operands);
+      output_asm_insn (\"ld.w 60[sp],  r18\", operands);
+      output_asm_insn (\"ld.w 56[sp],  r19\", operands);
+      output_asm_insn (\"ld.w 52[sp],  r20\", operands);
+      output_asm_insn (\"ld.w 48[sp],  r21\", operands);
+      output_asm_insn (\"ld.w 44[sp],  r22\", operands);
+      output_asm_insn (\"ld.w 40[sp],  r23\", operands);
+      output_asm_insn (\"ld.w 36[sp],  r24\", operands);
+      output_asm_insn (\"ld.w 32[sp],  r25\", operands);
+      output_asm_insn (\"ld.w 28[sp],  r26\", operands);
+      output_asm_insn (\"ld.w 24[sp],  r27\", operands);
+      output_asm_insn (\"ld.w 20[sp],  r28\", operands);
+      output_asm_insn (\"ld.w 16[sp],  r29\", operands);
+    }
+  output_asm_insn (\"addi  120, sp, sp\", operands);
+  return \"\";
+}"
+  [(set (attr "length")
+        (if_then_else (ne (symbol_ref "TARGET_LONG_CALLS") (const_int 0))
+                       (const_int 4)
+                       (const_int 62)
+	))
+   (set_attr "cc" "clobber")])
+
+(define_insn "_restore_all_interrupt"
+  [(unspec_volatile [(const_int 0)] 1)]
+  "TARGET_V850 && ! TARGET_LONG_CALLS"
+  "jarl __restore_all_interrupt,r10"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+
+;; Save r6-r9 for a variable argument function
+(define_insn "save_r6_r9_v850e"
+  [(set (mem:SI (reg:SI 3)) (reg:SI 6))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 4))) (reg:SI 7))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 8))) (reg:SI 8))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 12))) (reg:SI 9))
+  ]
+  "TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT"
+  "callt ctoff(__callt_save_r6_r9)"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+(define_insn "save_r6_r9"
+  [(set (mem:SI (reg:SI 3)) (reg:SI 6))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 4))) (reg:SI 7))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 8))) (reg:SI 8))
+   (set (mem:SI (plus:SI (reg:SI 3) (const_int 12))) (reg:SI 9))
+   (clobber (reg:SI 10))]
+  "TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS"
+  "jarl __save_r6_r9,r10"
+  [(set_attr "length" "4")
+   (set_attr "cc" "clobber")])
+