changeset 3:d43292db8c8c

finished merging
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 19:00:23 +0900
parents 0b3575e68bac
children 60db277cbe4d
files gcc/calls.c gcc/cbc-goto.h
diffstat 2 files changed, 839 insertions(+), 1 deletions(-)
--- a/gcc/calls.c	Fri Jul 17 18:01:20 2009 +0900
+++ b/gcc/calls.c	Fri Jul 17 19:00:23 2009 +0900
@@ -2395,7 +2395,7 @@
 
       // fprintf(stderr, "\n\tgoto code segment.\n");
       args_size.constant = CbC_ARGS_SIZE;
-      return expand_cbc_goto(exp, target, fndecl, funtype,
+      return expand_cbc_goto(exp, target, fndecl, funtype, fntype,
 	      addr, ignore, flags, num_actuals, args, &args_size,
 	      args_so_far,
 	      old_stack_level, reg_parm_stack_space, old_pending_adj,
@@ -2493,6 +2493,7 @@
         adjusted_args_size.constant = CbC_ARGS_SIZE;
         compute_argument_block_size (reg_parm_stack_space,
                                      &adjusted_args_size,
+									 fndecl, fntype,
                                      (pass == 0 ? 0
                                       : preferred_stack_boundary));
       }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gcc/cbc-goto.h	Fri Jul 17 19:00:23 2009 +0900
@@ -0,0 +1,837 @@
+
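+/* Fixed size (in bytes) assumed for the argument area of a code segment;
+   calls.c sets args_size.constant to this value before calling
+   expand_cbc_goto.  */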
+#define CbC_ARGS_SIZE 256
+
+static void
+preexpand_argument_expr (struct arg_data *, int);
+
+static void
+determine_order(int *, int);
+
+static int
+expand_one_arg_push (struct arg_data *, rtx, int, int, int);
+
+static void
+push_overlaps(struct arg_data *, int);
+
+static int
+check_frame_offset(rtx);
+
+
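+/* Expand a CbC "goto" to a code segment.  This is essentially a forced
+   sibling (tail) call; most of the code is adapted from expand_call in
+   gcc/calls.c.  */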
+static rtx
+expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype,
+		tree addr,
+		int ignore,
+		int flags,
+		int num_actuals,
+		struct arg_data *args,
+		struct args_size *args_size,
+		CUMULATIVE_ARGS args_so_far,
+		rtx old_stack_level,
+		int reg_parm_stack_space,
+		int old_pending_adj,
+		unsigned HOST_WIDE_INT preferred_stack_boundary,
+		unsigned HOST_WIDE_INT preferred_unit_stack_boundary,
+		rtx structure_value_addr,
+		//int structure_value_addr_parm,
+		int old_inhibit_defer_pop
+		)
+{
+
+  /* The following variables are just copied from expand_call.  */
+
+  int pass = 0;
+  int i;
+#ifdef REG_PARM_STACK_SPACE
+  /* Define the boundary of the register parm stack space that needs to be
+     saved, if any.  */
+#endif
+  rtx funexp;
+  rtx valreg;
+  struct args_size adjusted_args_size;
+  int unadjusted_args_size;
+  int reg_parm_seen;
+  rtx static_chain_value;
+  int old_stack_allocated;
+  int old_stack_pointer_delta = 0;
+  int old_stack_arg_under_construction = 0;
+  rtx call_fusage;
+  char *stack_usage_map_buf = NULL;
+  rtx argblock = 0;
+  HOST_WIDE_INT struct_value_size = 0;
+  int pcc_struct_value = 0;
+  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
+  char *initial_stack_usage_map = stack_usage_map;
+  rtx tail_call_insns = NULL_RTX;
+
+
+  int *store_order;
+
+
+  int sibcall_failure = 0;
+  /* We want to emit any pending stack adjustments before the tail
+     recursion "call".  That way we know any adjustment after the tail
+     recursion call can be ignored if we indeed use the tail
+     call expansion.  */
+  int save_pending_stack_adjust = 0;
+  int save_stack_pointer_delta = 0;
+  rtx insns;
+  rtx before_call, next_arg_reg;
+
+  /* for environment.  */
+  rtx env_rtx;
+  tree env_tree;
+
+  env_tree = TREE_OPERAND(exp, 2);
+
+
+  /* State variables we need to save and restore between
+     iterations.  */
+  save_pending_stack_adjust = pending_stack_adjust;
+  save_stack_pointer_delta = stack_pointer_delta;
+  flags |= ECF_SIBCALL;
+
+  /* Other state variables that we must reinitialize each time
+     through the loop (that are not initialized by the loop itself).  */
+  argblock = 0;
+  call_fusage = 0;
+
+  /* Start a new sequence for the normal call case.
+
+     From this point on, if the sibling call fails, we want to set
+     sibcall_failure instead of continuing the loop.  */
+  start_sequence ();
+
+  /* Don't let pending stack adjusts add up to too much.
+     Also, do all pending adjustments now if there is any chance
+     this might be a call to alloca or if we are expanding a sibling
+     call sequence or if we are calling a function that is to return
+     with stack pointer depressed.
+     Also do the adjustments before a throwing call, otherwise
+     exception handling can fail; PR 19225. */
+  if (pending_stack_adjust >= 32
+      || (pending_stack_adjust > 0
+	&& (flags & ECF_MAY_BE_ALLOCA))
+      || (pending_stack_adjust > 0
+	&& flag_exceptions && !(flags & ECF_NOTHROW))
+      || pass == 0)
+    do_pending_stack_adjust ();
+
+
+  if (pass == 0 && crtl->stack_protect_guard)
+    stack_protect_epilogue ();
+
+  adjusted_args_size = *args_size;
+  /* Compute the actual size of the argument block required.  The variable
+     and constant sizes must be combined, the size may have to be rounded,
+     and there may be a minimum required size.  When generating a sibcall
+     pattern, do not round up, since we'll be re-using whatever space our
+     caller provided.  */
+  unadjusted_args_size
+    = compute_argument_block_size (reg_parm_stack_space,
+	&adjusted_args_size,
+	fndecl, fntype,
+	(pass == 0 ? 0
+	 : preferred_stack_boundary));
+
+  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
+
+  /* The argument block when performing a sibling call is the
+     incoming argument block.  */
+  /*
+     if ( 0 && !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
+     {
+     argblock = virtual_outgoing_args_rtx;
+     }
+     else
+   */
+
+  if ( env_tree!=NULL_TREE )
+    {
+	  /*
+	   * ebp <= TREE_VALUE ( env_tree );
+	   * argsp <= TREE_PURPOSE ( env_tree );
+	   *
+	   */
+      //rtx tmp;
+      /* compute environment.  */
+      push_temp_slots ();
+      /* expand_expr shouldn't be here... it's moved later.  */
+      //env_rtx = expand_expr ( env_tree, NULL_RTX,
+	      //VOIDmode, EXPAND_STACK_PARM);
+      //argblock = hard_frame_pointer_rtx;
+      //argblock = env_rtx;
+      //argblock = plus_constant (argblock, -current_function_arg_offset_rtx);
+      //tmp = gen_rtx_MINUS (Pmode, hard_frame_pointer_rtx, virtual_incoming_args_rtx);
+      //argblock = gen_rtx_MINUS (Pmode, env_rtx, tmp);
+      //argblock = plus_constant (argblock, frame_offset);
+      argblock = expand_expr ( TREE_VALUE (env_tree), NULL_RTX,
+	      VOIDmode, EXPAND_STACK_PARM);
+      env_rtx = expand_expr ( TREE_PURPOSE (env_tree), NULL_RTX,
+	      VOIDmode, EXPAND_STACK_PARM);
+      preserve_temp_slots (argblock);
+      pop_temp_slots ();
+    }
+  else
+    {
+      //argblock = virtual_incoming_args_rtx;
+      argblock = crtl->args.internal_arg_pointer;
+
+      argblock
+#ifdef STACK_GROWS_DOWNWARD
+	  = plus_constant (argblock, crtl->args.pretend_args_size);
+#else
+	  = plus_constant (argblock, -crtl->args.pretend_args_size);
+#endif
+    }
+
+
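+  /* Map of argument-block bytes already stored, kept for parity with
+     expand_call's sibling-call path (not otherwise used in this file).  */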
+  stored_args_map = sbitmap_alloc (args_size->constant);
+  sbitmap_zero (stored_args_map);
+
+
+  if (ACCUMULATE_OUTGOING_ARGS)
+    {
+      /* The save/restore code in store_one_arg handles all
+	 cases except one: a constructor call (including a C
+	 function returning a BLKmode struct) to initialize
+	 an argument.  */
+      if (stack_arg_under_construction)
+	{
+	  rtx push_size
+	      = GEN_INT (adjusted_args_size.constant
+		      + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
+				  : TREE_TYPE (fndecl))) ? 0
+			  : reg_parm_stack_space));
+	  if (old_stack_level == 0)
+	    {
+	      emit_stack_save (SAVE_BLOCK, &old_stack_level,
+		      NULL_RTX);
+	      old_stack_pointer_delta = stack_pointer_delta;
+	      old_pending_adj = pending_stack_adjust;
+	      pending_stack_adjust = 0;
+	      /* stack_arg_under_construction says whether a stack
+		 arg is being constructed at the old stack level.
+		 Pushing the stack gets a clean outgoing argument
+		 block.  */
+	      old_stack_arg_under_construction
+		  = stack_arg_under_construction;
+	      stack_arg_under_construction = 0;
+	      /* Make a new map for the new argument list.  */
+	      if (stack_usage_map_buf)
+		  free (stack_usage_map_buf);
+	      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
+	      stack_usage_map = stack_usage_map_buf;
+	      memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
+	      highest_outgoing_arg_in_use = 0;
+	    }
+	  allocate_dynamic_stack_space (push_size, NULL_RTX,
+		  BITS_PER_UNIT);
+	}
+
+      /* If argument evaluation might modify the stack pointer,
+	 copy the address of the argument list to a register.  */
+      for (i = 0; i < num_actuals; i++)
+	if (args[i].pass_on_stack)
+	  {
+	    argblock = copy_addr_to_reg (argblock);
+	    break;
+	  }
+  }
+
+  compute_argument_addresses (args, argblock, num_actuals);
+
+  /* When the current function is an ordinary function (not a code
+     segment) jumping to a code segment, adjust the stack space.  */
+  if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
+  //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) )
+    {
+      HOST_WIDE_INT padding;
+      padding = CbC_ARGS_SIZE - 
+	  (crtl->args.size - crtl->args.pretend_args_size);
+      if (0&&padding > 0)
+	  anti_adjust_stack (GEN_INT (padding));
+    }
+
+  /* Now that the stack is properly aligned, pops can't safely
+     be deferred during the evaluation of the arguments.  */
+  NO_DEFER_POP;
+
+  funexp = rtx_for_function_call (fndecl, addr);
+
+  /* Figure out the register where the value, if any, will come back.  */
+  valreg = 0;
+
+
+  /* Precompute all register parameters.  It isn't safe to compute anything
+     once we have started filling any specific hard regs.  */
+  precompute_register_parameters (num_actuals, args, &reg_parm_seen);
+
+  /* operand[2] is environment.  */
+  //if (TREE_OPERAND (exp, 2))
+    //static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
+  //else
+    static_chain_value = 0;
+
+
+  /* parallel assignment  */
+  store_order = alloca (num_actuals * sizeof (int));
+  memset (store_order, 0, num_actuals * sizeof (int));
+
+  /* Pre-expand the argument expressions, filling args[i].exprs.  */
+  for (i = 0; i < num_actuals; i++)
+    {
+      if (args[i].reg == 0 || args[i].pass_on_stack)
+	{
+	  preexpand_argument_expr (&args[i],
+		  adjusted_args_size.var != 0);
+	}
+    }
+
+
+  /* Save arguments that overlap the incoming argument block into temporaries.  */
+  push_overlaps(args, num_actuals);
+
+  /* Determine the order in which to store the arguments, generating RTL
+     that stores some values in temporaries if needed.
+     For now this does no real analysis; the order is simply reversed.  */
+  determine_order(store_order, num_actuals);
+
+  /* Push the arguments in that order.  */
+  for (i = 0; i < num_actuals; i++)
+    {
+      if (args[store_order[i]].reg == 0
+	      || args[store_order[i]].pass_on_stack
+	      || args[store_order[i]].partial!=0 )
+	{
+	  expand_one_arg_push (&args[store_order[i]], argblock, flags,
+		  adjusted_args_size.var != 0,
+		  reg_parm_stack_space);
+	}
+    }
+
+
+  /* If register arguments require space on the stack and stack space
+     was not preallocated, allocate stack space here for arguments
+     passed in registers.  */
+#ifdef OUTGOING_REG_PARM_STACK_SPACE
+  //if (!ACCUMULATE_OUTGOING_ARGS
+  //&& must_preallocate == 0 && reg_parm_stack_space > 0)
+  //anti_adjust_stack (GEN_INT (reg_parm_stack_space));
+#endif
+
+  /* Get the function address into a form suitable for the call.  */
+  funexp = prepare_call_address (funexp, static_chain_value,
+      &call_fusage, reg_parm_seen, pass == 0);
+
+  /* Load the arguments that are passed in registers into those registers.  */
+  load_register_parameters (args, num_actuals, &call_fusage, flags,
+      //pass == 0, &sibcall_failure);
+			   0, NULL);
+
+  /* Save a pointer to the last insn before the call, so that we can
+     later safely search backwards to find the CALL_INSN.  */
+  before_call = get_last_insn ();
+
+  /* Set up next argument register.  For sibling calls on machines
+     with register windows this should be the incoming register.  */
+#ifdef FUNCTION_INCOMING_ARG
+  if (pass == 0)
+    next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
+	void_type_node, 1);
+  else
+#endif
+    next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
+	void_type_node, 1);
+
+  /* All arguments and registers used for the call must be set up by
+     now!  */
+
+  /* Stack must be properly aligned now.  */
+  gcc_assert (!pass
+      || !(stack_pointer_delta % preferred_unit_stack_boundary));
+
+  /* Restore the saved environment (frame pointer) and jump to the code
+     segment through an indirect jump.  */
+  if ( env_tree!=NULL )
+    {
+      emit_insn (gen_rtx_CLOBBER (VOIDmode,
+		  gen_rtx_MEM (BLKmode,
+		      hard_frame_pointer_rtx)));
+      emit_move_insn (hard_frame_pointer_rtx, env_rtx);
+      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+      //pop_temp_slots ();
+
+      emit_indirect_jump (funexp);
+    }
+
+  /* Generate the actual call instruction.  */
+  emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
+      adjusted_args_size.constant, struct_value_size,
+      //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
+      next_arg_reg, valreg, 0, call_fusage,
+      flags, & args_so_far);
+
+  /* If a non-BLKmode value is returned at the most significant end
+     of a register, shift the register right by the appropriate amount
+     and update VALREG accordingly.  BLKmode values are handled by the
+     group load/store machinery below.  */
+  if (!structure_value_addr
+      && !pcc_struct_value
+      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
+      && targetm.calls.return_in_msb (TREE_TYPE (exp)))
+    {
+      if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
+	  sibcall_failure = 1;
+      valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
+    }
+
+
+  /* For calls to `setjmp', etc., inform flow.c it should complain
+     if nonvolatile values are live.  For functions that cannot return,
+     inform flow that control does not fall through.  */
+
+  if ((flags & ECF_NORETURN) || pass == 0)
+    {
+      /* The barrier must be emitted
+	 immediately after the CALL_INSN.  Some ports emit more
+	 than just a CALL_INSN above, so we must search for it here.  */
+
+      rtx last = get_last_insn ();
+      while (!CALL_P (last))
+	{
+	  last = PREV_INSN (last);
+	  /* There was no CALL_INSN?  */
+	  gcc_assert (last != before_call);
+	}
+
+      emit_barrier_after (last);
+
+      /* Stack adjustments after a noreturn call are dead code.
+	 However when NO_DEFER_POP is in effect, we must preserve
+	 stack_pointer_delta.  */
+      if (inhibit_defer_pop == 0)
+	{
+	  stack_pointer_delta = old_stack_allocated;
+	  pending_stack_adjust = 0;
+	}
+    }
+
+  /* If value type not void, return an rtx for the value.  */
+
+  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
+      || ignore)
+    target = const0_rtx;
+
+  if (targetm.calls.promote_function_return(funtype))
+    {
+      /* If we promoted this return value, make the proper SUBREG.
+	 TARGET might be const0_rtx here, so be careful.  */
+      if (REG_P (target)
+	      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
+	      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
+	{
+	  tree type = TREE_TYPE (exp);
+	  int unsignedp = TYPE_UNSIGNED (type);
+	  int offset = 0;
+	  enum machine_mode pmode;
+
+	  pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
+	  /* If we don't promote as expected, something is wrong.  */
+	  gcc_assert (GET_MODE (target) == pmode);
+
+	  if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
+		  && (GET_MODE_SIZE (GET_MODE (target))
+		      > GET_MODE_SIZE (TYPE_MODE (type))))
+	    {
+	      offset = GET_MODE_SIZE (GET_MODE (target))
+		  - GET_MODE_SIZE (TYPE_MODE (type));
+	      if (! BYTES_BIG_ENDIAN)
+		  offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
+	      else if (! WORDS_BIG_ENDIAN)
+		  offset %= UNITS_PER_WORD;
+	    }
+	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
+	  SUBREG_PROMOTED_VAR_P (target) = 1;
+	  SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
+	}
+    }
+
+  /* If size of args is variable or this was a constructor call for a stack
+     argument, restore saved stack-pointer value.  */
+
+  if (old_stack_level)
+    {
+      emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+      stack_pointer_delta = old_stack_pointer_delta;
+      pending_stack_adjust = old_pending_adj;
+      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
+      stack_arg_under_construction = old_stack_arg_under_construction;
+      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
+      stack_usage_map = initial_stack_usage_map;
+    }
+
+  /* If this was alloca, record the new stack level for nonlocal gotos.
+     Check for the handler slots since we might not have a save area
+     for non-local gotos.  */
+
+  if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
+    update_nonlocal_goto_save_area ();
+
+  /* Free up storage we no longer need.  */
+  for (i = 0; i < num_actuals; ++i)
+    if (args[i].aligned_regs)
+      free (args[i].aligned_regs);
+
+  insns = get_insns ();
+  end_sequence ();
+
+  tail_call_insns = insns;
+
+  /* Restore the pending stack adjustment now that we have
+     finished generating the sibling call sequence.  */
+
+  pending_stack_adjust = save_pending_stack_adjust;
+  stack_pointer_delta = save_stack_pointer_delta;
+
+  /* Prepare arg structure for next iteration.  */
+  for (i = 0; i < num_actuals; i++)
+    {
+      args[i].value = 0;
+      args[i].aligned_regs = 0;
+      args[i].stack = 0;
+    }
+
+  sbitmap_free (stored_args_map);
+
+
+  emit_insn(tail_call_insns);
+  crtl->tail_call_emit = true;
+
+  return target;
+}
+
+
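+/* Expand the tree expression for one argument into arg->value and emit
+   that RTL immediately, without pushing the value onto the stack yet.  */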
+static void
+preexpand_argument_expr (struct arg_data *arg,
+	       int variable_size ATTRIBUTE_UNUSED)
+{
+  tree pval = arg->tree_value;
+  rtx reg = 0;
+  int partial = 0;
+
+  if (TREE_CODE (pval) == ERROR_MARK)
+    return;
+
+  /* Push a new temporary level for any temporaries we make for
+     this argument.  */
+  push_temp_slots ();
+
+
+  /* If this isn't going to be placed on both the stack and in registers,
+     set up the register and number of words.  */
+  if (! arg->pass_on_stack)
+    {
+      //if (flags & ECF_SIBCALL)
+      reg = arg->tail_call_reg;
+      //else
+      //reg = arg->reg;
+      partial = arg->partial;
+    }
+
+  /* Being passed entirely in a register.  We shouldn't be called in
+     this case.  */
+  gcc_assert (reg == 0 || partial != 0);
+
+  /* If this arg needs special alignment, don't load the registers
+     here.  */
+  if (arg->n_aligned_regs != 0)
+    reg = 0;
+
+  /* Start a new sequence for the arg->exprs.  */
+  start_sequence ();
+
+
+  if (arg->pass_on_stack)
+    stack_arg_under_construction++;
+
+  arg->value = expand_expr (pval,
+      (partial
+       || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
+      ? NULL_RTX : arg->stack,
+      VOIDmode, EXPAND_STACK_PARM);
+
+  /* If we are promoting object (or for any other reason) the mode
+     doesn't agree, convert the mode.  */
+
+  if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
+    arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
+	arg->value, arg->unsignedp);
+
+  if (arg->pass_on_stack)
+    stack_arg_under_construction--;
+
+  arg->exprs = get_insns ();
+  end_sequence ();
+
+  if (arg->exprs) emit_insn(arg->exprs);
+
+  preserve_temp_slots (arg->value);
+  pop_temp_slots ();
+
+  return ;
+}
+
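+/* Push a single argument onto the stack (or prepare its parallel value);
+   largely copied from store_one_arg in calls.c.  */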
+static int
+expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags,
+	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
+{
+  tree pval = arg->tree_value;
+  int used = 0;
+  int i, lower_bound = 0, upper_bound = 0;
+  rtx reg = 0;
+  int partial = 0;
+
+  /* Push a new temporary level for any temporaries we make for
+     this argument.  */
+  push_temp_slots ();
+
+
+  /* Copied from store_one_arg; to be adapted further.  */
+  /* If this isn't going to be placed on both the stack and in registers,
+     set up the register and number of words.  */
+  if (! arg->pass_on_stack)
+    {
+      //if (flags & ECF_SIBCALL)
+      reg = arg->tail_call_reg;
+      //else
+      //reg = arg->reg;
+      partial = arg->partial;
+    }
+  /* Being passed entirely in a register.  We shouldn't be called in
+     this case.  */
+  gcc_assert (reg == 0 || partial != 0);
+  /* If this arg needs special alignment, don't load the registers
+     here.  */
+  if (arg->n_aligned_regs != 0)
+    reg = 0;
+
+
+
+
+  if (arg->value == arg->stack)
+    /* If the value is already in the stack slot, we are done.  */
+    ;
+  else if (arg->mode != BLKmode)
+    {
+      int size;
+
+      /* Argument is a scalar, not entirely passed in registers.
+	 (If part is passed in registers, arg->partial says how much
+	 and emit_push_insn will take care of putting it there.)
+
+	 Push it, and if its size is less than the
+	 amount of space allocated to it,
+	 also bump stack pointer by the additional space.
+	 Note that in C the default argument promotions
+	 will prevent such mismatches.  */
+
+      size = GET_MODE_SIZE (arg->mode);
+      /* Compute how much space the push instruction will push.
+	 On many machines, pushing a byte will advance the stack
+	 pointer by a halfword.  */
+#ifdef PUSH_ROUNDING
+      size = PUSH_ROUNDING (size);
+#endif
+      used = size;
+
+      /* Compute how much space the argument should get:
+	 round up to a multiple of the alignment for arguments.  */
+      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
+	  used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
+		      / (PARM_BOUNDARY / BITS_PER_UNIT))
+		  * (PARM_BOUNDARY / BITS_PER_UNIT));
+
+      /* This isn't already where we want it on the stack, so put it there.
+	 This can either be done with push or copy insns.  */
+      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
+	      PARM_BOUNDARY, partial, reg, used - size, argblock,
+	      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+	      ARGS_SIZE_RTX (arg->locate.alignment_pad));
+
+      /* Unless this is a partially-in-register argument, the argument is now
+	 in the stack.  */
+      if (partial == 0)
+	  arg->value = arg->stack;
+    }
+  else
+    {
+      /* BLKmode, at least partly to be pushed.  */
+
+      unsigned int parm_align;
+      int excess;
+      rtx size_rtx;
+
+      /* Pushing a nonscalar.
+	 If part is passed in registers, PARTIAL says how much
+	 and emit_push_insn will take care of putting it there.  */
+
+      /* Round its size up to a multiple
+	 of the allocation unit for arguments.  */
+
+      if (arg->locate.size.var != 0)
+	{
+	  excess = 0;
+	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
+	}
+      else
+	{
+	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
+	     for BLKmode is careful to avoid it.  */
+	  excess = (arg->locate.size.constant
+		  - int_size_in_bytes (TREE_TYPE (pval))
+		  + partial);
+	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
+		  NULL_RTX, TYPE_MODE (sizetype), 0);
+	}
+
+      parm_align = arg->locate.boundary;
+
+      /* When an argument is padded down, the block is aligned to
+	 PARM_BOUNDARY, but the actual argument isn't.  */
+      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
+	{
+	  if (arg->locate.size.var)
+	      parm_align = BITS_PER_UNIT;
+	  else if (excess)
+	    {
+	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
+	      parm_align = MIN (parm_align, excess_align);
+	    }
+	}
+
+      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
+	{
+	  /* emit_push_insn might not work properly if arg->value and
+	     argblock + arg->locate.offset areas overlap.  */
+	  rtx x = arg->value;
+	  int i = 0;
+
+	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
+		  || (GET_CODE (XEXP (x, 0)) == PLUS
+		      && XEXP (XEXP (x, 0), 0) ==
+		      crtl->args.internal_arg_pointer
+		      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
+	    {
+	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
+		  i = INTVAL (XEXP (XEXP (x, 0), 1));
+
+	      /* expand_call should ensure this.  */
+	      gcc_assert (!arg->locate.offset.var
+		      && GET_CODE (size_rtx) == CONST_INT);
+	    }
+	}
+
+      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
+	      parm_align, partial, reg, excess, argblock,
+	      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+	      ARGS_SIZE_RTX (arg->locate.alignment_pad));
+
+      /* Unless this is a partially-in-register argument, the argument is now
+	 in the stack.
+
+	 ??? Unlike the case above, in which we want the actual
+	 address of the data, so that we can load it directly into a
+	 register, here we want the address of the stack slot, so that
+	 it's properly aligned for word-by-word copying or something
+	 like that.  It's not clear that this is always correct.  */
+      if (partial == 0)
+	  arg->value = arg->stack_slot;
+    }
+
+  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
+    {
+      tree type = TREE_TYPE (arg->tree_value);
+      arg->parallel_value
+	  = emit_group_load_into_temps (arg->reg, arg->value, type,
+		  int_size_in_bytes (type));
+    }
+
+  /* Mark all slots this store used.  */
+  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
+      && argblock && ! variable_size && arg->stack)
+    for (i = lower_bound; i < upper_bound; i++)
+      stack_usage_map[i] = 1;
+
+  /* Once we have pushed something, pops can't safely
+     be deferred during the rest of the arguments.  */
+  NO_DEFER_POP;
+
+  /* Free any temporary slots made in processing this argument.  Show
+     that we might have taken the address of something and pushed that
+     as an operand.  */
+  preserve_temp_slots (NULL_RTX);
+  free_temp_slots ();
+  pop_temp_slots ();
+
+  return 0;
+}
+
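+/* Decide the order in which arguments are stored.  Currently no
+   dependency analysis is done: the arguments are simply stored in
+   reverse order.  */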
+static void
+determine_order(int *order, int num_actuals)
+{
+  int i;
+  for (i=0; i<num_actuals; i++)
+      order[i] = num_actuals-i-1;
+  return;
+}
+
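+/* For each argument whose source value and destination slot both lie in
+   the incoming argument block, copy the value to a temporary so that
+   storing the other arguments cannot clobber it.  */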
+static void
+push_overlaps(struct arg_data *args, int num_actuals)
+{
+  int i;
+
+  for (i=0; i<num_actuals; i++)
+    {
+      int dst_offset; /* offset of the destination slot within the incoming argument block */
+      int src_offset; /* offset of the source value within the incoming argument block */
+      rtx temp;
+      if ( (dst_offset=check_frame_offset(args[i].stack)) < 0 ) continue;
+      if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue;
+
+      /* Save (evacuate) the value to a temporary.  */
+      temp = assign_temp(args[i].tree_value, 1, 0, 0);
+      if ( args[i].mode==BLKmode )
+	  emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 );
+      else
+	  emit_move_insn ( temp, args[i].value );
+      args[i].value = temp;
+
+    }
+  return;
+}
+
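+/* If X is a MEM addressed relative to the incoming argument pointer,
+   return its constant byte offset; otherwise return -1.  */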
+static int
+check_frame_offset(rtx x)
+{
+  int i;
+  rtx addr;
+  if ( !x || !MEM_P(x))
+      return -1;
+
+  addr = XEXP(x, 0);
+  if (addr == crtl->args.internal_arg_pointer)
+      i = 0;
+  else if (GET_CODE (addr) == PLUS
+	  && XEXP (addr, 0) == crtl->args.internal_arg_pointer
+	  && GET_CODE (XEXP (addr, 1)) == CONST_INT)
+      i = INTVAL (XEXP (addr, 1));
+  else if (GET_CODE (addr) == PLUS
+	  && GET_CODE (XEXP (addr, 0)) == CONST_INT
+	  && XEXP (addr, 1) == crtl->args.internal_arg_pointer )
+      i = INTVAL (XEXP (addr, 0));
+  else
+      return -1;
+
+  return i;
+}
+