diff gcc/calls.c @ 70:b81903832de2
merge c-decl.c
| author | Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp> |
|---|---|
| date | Sun, 21 Aug 2011 09:24:16 +0900 |
| parents | 1b10fe6932e1 |
| children | ce75bd9117e4 |
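This changeset backs the Continuation-based C (CbC) hooks out of gcc/calls.c while merging upstream changes: it removes the `exprs` field from `struct arg_data`, the `cbc-tree.h` include, and the `#ifndef noCbC` blocks in `expand_call` that recognized CbC `goto` transitions and forced them to expand as tail calls; the remaining hunks only re-indent code to match upstream GCC style. For context on what the removed logic handled, here is a minimal CbC sketch. It assumes the CbC `__code` keyword, the `goto segment(args);` transition syntax, and a CbC-enabled compiler; it is an illustration, not code from this repository.

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical CbC sketch.  A code segment never returns, so every
   `goto segment(...)' transition must compile to a jump.  The expand_call
   logic removed below forced try_tail_call for such transitions even when
   -foptimize-sibling-calls (implied by -O2) was not enabled.  */

__code print_result(int acc)        /* __code declares a code segment */
{
  printf("%d\n", acc);
  exit(0);
}

__code factorial(int n, int acc)
{
  if (n <= 1)
    goto print_result(acc);         /* transition, not a function call */
  goto factorial(n - 1, acc * n);   /* must become a jmp, keeping the stack flat */
}

int main(void)
{
  goto factorial(5, 1);             /* entering a code segment from a function,
                                       one of the cases the removed block handled */
  return 0;                         /* unreachable: the goto never comes back */
}
```

With the forced tail call in place, the chain factorial(5, 1) through print_result(120) runs in constant stack space, which is the invariant that the `warning_at` message removed in the last CbC hunk used to protect.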
--- a/gcc/calls.c	Sun Aug 21 07:53:12 2011 +0900
+++ b/gcc/calls.c	Sun Aug 21 09:24:16 2011 +0900
@@ -99,9 +99,6 @@
 	 word-sized pseudos we made.  */
   rtx *aligned_regs;
   int n_aligned_regs;
-#ifndef noCbC
-  rtx exprs;
-#endif
 };
 
 /* A vector of one char per byte of stack space.  A byte if nonzero if
@@ -128,33 +125,33 @@
  [whitespace-only re-indentation: forward declarations of the static helper functions]
@@ -171,7 +168,7 @@
  [whitespace-only re-indentation: prepare_call_address's parameter list]
@@ -186,7 +183,7 @@
  [whitespace-only re-indentation: the force_reg call guarded by NO_FUNCTION_CSE]
@@ -200,7 +197,7 @@
  [whitespace-only re-indentation: the static-chain use_reg call]
@@ -250,13 +247,13 @@
  [whitespace-only re-indentation: emit_call_1's parameter list]
@@ -283,16 +280,16 @@
  [whitespace-only re-indentation: the sibcall_value_pop/sibcall_pop emission in emit_call_1]
@@ -316,15 +313,15 @@
  [whitespace-only re-indentation: the call_value_pop/call_pop emission]
@@ -337,14 +334,14 @@
  [whitespace-only re-indentation: the sibcall_value/sibcall emission]
@@ -353,14 +350,14 @@
  [whitespace-only re-indentation: the call_value/call emission]
@@ -410,10 +407,10 @@
  [whitespace-only re-indentation: the stack-pointer clobber added to CALL_INSN_FUNCTION_USAGE]
@@ -426,23 +423,23 @@
  [whitespace-only re-indentation: the argument-popping logic after the call]
@@ -476,71 +473,71 @@
  [whitespace-only re-indentation: special_function_p's name checks for alloca, setjmp, vfork, getcontext and longjmp]
@@ -581,7 +578,7 @@
  [whitespace-only re-indentation: alloca_call_p's ECF_MAY_BE_ALLOCA test]
@@ -597,19 +594,19 @@
  [whitespace-only re-indentation: flags_from_decl_or_type's attribute handling]
@@ -617,7 +614,7 @@
  [whitespace-only re-indentation: the ECF_NOTHROW flag]
@@ -648,9 +645,9 @@
  [whitespace-only re-indentation: call_expr_flags' pointer-type case]
@@ -665,7 +662,7 @@
  [whitespace-only re-indentation: precompute_register_parameters' parameter list]
@@ -754,51 +751,51 @@
  [whitespace-only re-indentation: the body of save_fixed_argument_area]
@@ -817,16 +814,16 @@
  [whitespace-only re-indentation: the body of restore_fixed_argument_area]
@@ -846,38 +843,38 @@
  [whitespace-only re-indentation: store_unaligned_arguments_into_pseudos]
@@ -944,16 +941,16 @@
  [whitespace-only re-indentation: initialize_argument_information's parameter list]
@@ -975,7 +972,7 @@
  [whitespace-only re-indentation: the reverse-scan comment]
@@ -991,25 +988,25 @@
  [whitespace-only re-indentation: the argument-gathering loop that splits complex arguments]
@@ -1022,7 +1019,7 @@
  [whitespace-only re-indentation: the erroneous-argument fallback]
@@ -1033,17 +1030,17 @@
  [whitespace-only re-indentation: the "Decide where to pass this arg" comment block]
@@ -1136,7 +1133,7 @@
  [whitespace-only re-indentation: the promote_function_mode call]
@@ -1155,57 +1152,57 @@
  [whitespace-only re-indentation: the per-argument partial/pass_on_stack/locate_and_pad_parm logic]
@@ -1220,10 +1217,10 @@
  [whitespace-only re-indentation: compute_argument_block_size's parameter list]
@@ -1244,45 +1241,45 @@
  [whitespace-only re-indentation: the stack-boundary rounding in compute_argument_block_size]
@@ -1321,37 +1318,37 @@
  [whitespace-only re-indentation: precompute_arguments' mode-conversion block]
@@ -1361,7 +1358,7 @@
  [whitespace-only re-indentation: finalize_must_preallocate's parameter list]
@@ -1389,24 +1386,24 @@
  [whitespace-only re-indentation: finalize_must_preallocate's scanning loop]
@@ -1428,87 +1425,87 @@
  [whitespace-only re-indentation: the body of compute_argument_addresses]
@@ -1529,12 +1526,12 @@
  [whitespace-only re-indentation: rtx_for_function_call's assemble_external block]
@@ -1544,7 +1541,7 @@
  [whitespace-only re-indentation: the pop_temp_slots comment]
@@ -1562,13 +1559,13 @@
  [whitespace-only re-indentation: mem_overlaps_already_clobbered_arg_p's address checks]
@@ -1581,9 +1578,9 @@
  [whitespace-only re-indentation: the stored_args_map bit test]
@@ -1601,76 +1598,76 @@
  [whitespace-only re-indentation: the first half of load_register_parameters]
@@ -1697,46 +1694,46 @@
  [whitespace-only re-indentation: the BLKmode-shift half of load_register_parameters]
@@ -1751,8 +1748,8 @@
  [whitespace-only re-indentation: combine_pending_stack_adjustment_and_call's parameter list]
@@ -1781,9 +1778,9 @@
  [whitespace-only re-indentation: the alignment adjustment]
@@ -1816,23 +1813,23 @@
  [whitespace-only re-indentation: check_sibcall_argument_overlap_1's subexpression scan]
@@ -1856,7 +1853,7 @@
  [whitespace-only re-indentation: the insn scan in check_sibcall_argument_overlap]
@@ -1868,7 +1865,7 @@
  [whitespace-only re-indentation: the SET_BIT loop over stored_args_map]
@@ -1891,15 +1888,11 @@
      of the MIPS port, which requires SImode values to be sign-extended
      when stored in 64-bit registers.  */
   if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
-                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
+			   value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
     gcc_unreachable ();
   return true;
 }
 
-#ifndef noCbC
-#include "cbc-tree.h"
-#endif
-
 /* If X is a likely-spilled register value, copy it to a pseudo
    register and return that register.  Return X otherwise.  */
@@ -1913,9 +1906,9 @@
  [whitespace-only re-indentation: avoid_likely_spilled_reg's comment]
@@ -1925,7 +1918,7 @@
   return x;
 }
 
-/* Generate all the code for a function call
+/* Generate all the code for a CALL_EXPR
    exp and return an rtx for its value.
    Store the value in TARGET (specified as an rtx) if convenient.
    If the value is stored in TARGET then TARGET is returned.
@@ -2021,7 +2014,7 @@
  [whitespace-only re-indentation: the save_area comment in expand_call]
@@ -2083,25 +2076,25 @@
  [whitespace-only re-indentation: the const/pure early-return block]
@@ -2122,25 +2115,25 @@
  [whitespace-only re-indentation: the PCC_STATIC_STRUCT_RETURN handling]
@@ -2151,14 +2144,14 @@
  [whitespace-only re-indentation: the preferred_incoming_stack_boundary adjustment]
@@ -2174,12 +2167,12 @@
  [whitespace-only re-indentation: the complex-argument counting loop]
@@ -2194,20 +2187,20 @@
  [whitespace-only re-indentation: the structure_value_addr copy]
@@ -2221,8 +2214,8 @@
  [whitespace-only re-indentation: the n_named_args computation]
@@ -2257,7 +2250,7 @@
  [whitespace-only re-indentation: the pretend_outgoing_varargs_named test]
@@ -2271,20 +2264,20 @@
  [whitespace-only re-indentation: the initialize_argument_information and finalize_must_preallocate calls]
We don't need to do this if we know that we are @@ -2292,10 +2285,10 @@ if (structure_value_addr && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) - || reg_mentioned_p (virtual_outgoing_args_rtx, - structure_value_addr)) + || reg_mentioned_p (virtual_outgoing_args_rtx, + structure_value_addr)) && (args_size.var - || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) + || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) structure_value_addr = copy_to_reg (structure_value_addr); /* Tail calls can make things harder to debug, and we've traditionally @@ -2303,13 +2296,8 @@ expanding a call, as that means we're an argument. Don't try if there's cleanups, as we know there's code to follow the call. */ - // -O2オプションがないときも末尾最適化が行われるように(Code Segmentのみ) if (currently_expanding_call++ != 0 -#ifndef noCbC - || ((!fndecl || !CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl))) && !flag_optimize_sibling_calls) -#else || !flag_optimize_sibling_calls -#endif || args_size.var || dbg_cnt (tail_call) == false) try_tail_call = 0; @@ -2323,33 +2311,33 @@ #endif || !try_tail_call /* Doing sibling call optimization needs some work, since - structure_value_addr can be allocated on the stack. - It does not seem worth the effort since few optimizable - sibling calls will return a structure. */ + structure_value_addr can be allocated on the stack. + It does not seem worth the effort since few optimizable + sibling calls will return a structure. */ || structure_value_addr != NULL_RTX #ifdef REG_PARM_STACK_SPACE /* If outgoing reg parm stack space changes, we can not do sibcall. */ || (OUTGOING_REG_PARM_STACK_SPACE (funtype) - != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))) + != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))) || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl)) #endif /* Check whether the target is able to optimize the call - into a sibcall. */ + into a sibcall. */ || !targetm.function_ok_for_sibcall (fndecl, exp) /* Functions that do not return exactly once may not be sibcall - optimized. */ + optimized. */ || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN)) || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))) /* If the called function is nested in the current one, it might access - some of the caller's arguments, but could clobber them beforehand if - the argument areas are shared. */ + some of the caller's arguments, but could clobber them beforehand if + the argument areas are shared. */ || (fndecl && decl_function_context (fndecl) == current_function_decl) /* If this function requires more stack slots than the current - function, we cannot change it into a sibling call. - crtl->args.pretend_args_size is not part of the - stack allocated by our caller. */ + function, we cannot change it into a sibling call. + crtl->args.pretend_args_size is not part of the + stack allocated by our caller. */ || args_size.constant > (crtl->args.size - - crtl->args.pretend_args_size) + - crtl->args.pretend_args_size) /* If the callee pops its own arguments, then it must pop exactly the same number of arguments as the current function. 
*/ || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant) @@ -2373,21 +2361,21 @@ callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); callee_mode = TYPE_MODE (TREE_TYPE (funtype)); caller_promoted_mode - = promote_function_mode (TREE_TYPE (caller_res), caller_mode, - &caller_unsignedp, - TREE_TYPE (current_function_decl), 1); + = promote_function_mode (TREE_TYPE (caller_res), caller_mode, + &caller_unsignedp, + TREE_TYPE (current_function_decl), 1); callee_promoted_mode - = promote_function_mode (TREE_TYPE (funtype), callee_mode, - &callee_unsignedp, - funtype, 1); + = promote_function_mode (TREE_TYPE (funtype), callee_mode, + &callee_unsignedp, + funtype, 1); if (caller_mode != VOIDmode - && (caller_promoted_mode != callee_promoted_mode - || ((caller_mode != caller_promoted_mode - || callee_mode != callee_promoted_mode) - && (caller_unsignedp != callee_unsignedp - || GET_MODE_BITSIZE (caller_mode) - < GET_MODE_BITSIZE (callee_mode))))) - try_tail_call = 0; + && (caller_promoted_mode != callee_promoted_mode + || ((caller_mode != caller_promoted_mode + || callee_mode != callee_promoted_mode) + && (caller_unsignedp != callee_unsignedp + || GET_MODE_BITSIZE (caller_mode) + < GET_MODE_BITSIZE (callee_mode))))) + try_tail_call = 0; } /* Ensure current function's preferred stack boundary is at least @@ -2400,67 +2388,6 @@ preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; -#ifndef noCbC - if ( fntype - && CbC_IS_CbC_GOTO (exp) // is this better than CALL_EXPR_TAILCALL()? - && CbC_IS_CODE_SEGMENT (TREE_TYPE (current_function_decl)) - ) - { - - args_size.constant = CbC_PRETENDED_STACK_SIZE; - // Force try_tail_call on so that tail call optimization is always performed. - // -> expand_cbc_goto is no longer needed. - /* return expand_cbc_goto(exp, target, fndecl, funtype, fntype, - * addr, ignore, flags, num_actuals, args, &args_size, - * args_so_far, - * old_stack_level, reg_parm_stack_space, old_pending_adj, - * preferred_stack_boundary, preferred_unit_stack_boundary, - * structure_value_addr, old_inhibit_defer_pop); */ - } - else if ( CbC_IS_CbC_GOTO (exp) ) - { - // TODO: transition from a function to a code segment - /* - if (fndecl) - { - char *name_callee = IDENTIFIER_POINTER(DECL_NAME(fndecl)); - warning(0, "no warning: code segment `%s' has been called from a function.", name_callee); - } - else - { - warning(0, "no warning: unnamed code segment has been called from a function."); - } - */ - args_size.constant = CbC_PRETENDED_STACK_SIZE; - } - else if ( fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl)) ) - { - // Warn: a code segment is being called as a function. - //char *name= IDENTIFIER_POINTER(DECL_NAME(fndecl)); - //warning (0, "code segment `%s' has been \"called\" instead \"goto\".", name); - } - else if (CbC_IS_CODE_SEGMENT(TREE_TYPE (current_function_decl)) ) - { - // A function call from inside a code segment; nothing wrong with that. - //warning (0, "no warning: normal call from a code segment."); - } -#endif - - // When the tail call optimization flag is off, warn about it - // and set the flag to force a tail call optimization. -#ifndef noCbC - if (fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl)) - && CbC_IS_CODE_SEGMENT (TREE_TYPE (current_function_decl)) - && try_tail_call == 0) - { - location_t loc = EXPR_LOCATION (exp); - char *name_callee = IDENTIFIER_POINTER(DECL_NAME(fndecl)); - warning_at (loc, 0, "transition to code segment \"%s\" with CbC goto, but tail call optimization was cut.", - name_callee); - try_tail_call = 1; - } -#endif - /* We want to make two insn chains; one for a sibling call, the other for a normal call.
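The block removed above implemented the CbC-specific policy: a CbC goto between code segments is a transition, not a call, so it must compile to a jump. try_tail_call was therefore forced on (with a warning) even when -foptimize-sibling-calls was not in effect, and the argument block was pinned to CbC_PRETENDED_STACK_SIZE so any code segment's arguments fit the caller's frame. A minimal CbC sketch of the construct involved, assuming the __code/goto surface syntax of this compiler; it builds only with the CbC front end, and the segment names are illustrative:

#include <stdio.h>
#include <stdlib.h>

__code cs_print (int v)
{
  printf ("%d\n", v);
  exit (0);
}

__code cs_add (int a, int b)
{
  /* A transition, not a call: the current frame is abandoned, so
     this goto must be emitted as a tail jump, which is why the
     removed code forced try_tail_call = 1.  */
  goto cs_print (a + b);
}

int
main (void)
{
  goto cs_add (1, 2);   /* enter the code-segment world; never returns */
}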
We will select one of the two chains after initial RTL generation is complete. */ @@ -2468,110 +2395,95 @@ { int sibcall_failure = 0; /* We want to emit any pending stack adjustments before the tail - recursion "call". That way we know any adjustment after the tail - recursion call can be ignored if we indeed use the tail - call expansion. */ + recursion "call". That way we know any adjustment after the tail + recursion call can be ignored if we indeed use the tail + call expansion. */ int save_pending_stack_adjust = 0; int save_stack_pointer_delta = 0; rtx insns; rtx before_call, next_arg_reg, after_args; if (pass == 0) - { - /* State variables we need to save and restore between - iterations. */ - save_pending_stack_adjust = pending_stack_adjust; - save_stack_pointer_delta = stack_pointer_delta; - } + { + /* State variables we need to save and restore between + iterations. */ + save_pending_stack_adjust = pending_stack_adjust; + save_stack_pointer_delta = stack_pointer_delta; + } if (pass) - flags &= ~ECF_SIBCALL; + flags &= ~ECF_SIBCALL; else - flags |= ECF_SIBCALL; + flags |= ECF_SIBCALL; /* Other state variables that we must reinitialize each time - through the loop (that are not initialized by the loop itself). */ + through the loop (that are not initialized by the loop itself). */ argblock = 0; call_fusage = 0; /* Start a new sequence for the normal call case. - From this point on, if the sibling call fails, we want to set - sibcall_failure instead of continuing the loop. */ + From this point on, if the sibling call fails, we want to set + sibcall_failure instead of continuing the loop. */ start_sequence (); /* Don't let pending stack adjusts add up to too much. - Also, do all pending adjustments now if there is any chance - this might be a call to alloca or if we are expanding a sibling - call sequence. - Also do the adjustments before a throwing call, otherwise - exception handling can fail; PR 19225. */ + Also, do all pending adjustments now if there is any chance + this might be a call to alloca or if we are expanding a sibling + call sequence. + Also do the adjustments before a throwing call, otherwise + exception handling can fail; PR 19225. */ if (pending_stack_adjust >= 32 - || (pending_stack_adjust > 0 - && (flags & ECF_MAY_BE_ALLOCA)) - || (pending_stack_adjust > 0 - && flag_exceptions && !(flags & ECF_NOTHROW)) - || pass == 0) - do_pending_stack_adjust (); + || (pending_stack_adjust > 0 + && (flags & ECF_MAY_BE_ALLOCA)) + || (pending_stack_adjust > 0 + && flag_exceptions && !(flags & ECF_NOTHROW)) + || pass == 0) + do_pending_stack_adjust (); /* Precompute any arguments as needed. */ if (pass) - precompute_arguments (num_actuals, args); + precompute_arguments (num_actuals, args); /* Now we are about to start emitting insns that can be deleted - if a libcall is deleted. */ + if a libcall is deleted. */ if (pass && (flags & ECF_MALLOC)) - start_sequence (); + start_sequence (); if (pass == 0 && crtl->stack_protect_guard) - stack_protect_epilogue (); + stack_protect_epilogue (); adjusted_args_size = args_size; /* Compute the actual size of the argument block required. The variable - and constant sizes must be combined, the size may have to be rounded, - and there may be a minimum required size. When generating a sibcall - pattern, do not round up, since we'll be re-using whatever space our - caller provided. 
*/ -#ifndef noCbC - if ( fntype && CbC_IS_CODE_SEGMENT(fntype) ) - { - unadjusted_args_size = args_size.constant; - adjusted_args_size.constant = CbC_PRETENDED_STACK_SIZE; - compute_argument_block_size (reg_parm_stack_space, - &adjusted_args_size, - fndecl, fntype, - (pass == 0 ? 0 - : preferred_stack_boundary)); - } - else -#endif - { + and constant sizes must be combined, the size may have to be rounded, + and there may be a minimum required size. When generating a sibcall + pattern, do not round up, since we'll be re-using whatever space our + caller provided. */ unadjusted_args_size - = compute_argument_block_size (reg_parm_stack_space, - &adjusted_args_size, - fndecl, fntype, - (pass == 0 ? 0 - : preferred_stack_boundary)); - } + = compute_argument_block_size (reg_parm_stack_space, + &adjusted_args_size, + fndecl, fntype, + (pass == 0 ? 0 + : preferred_stack_boundary)); old_stack_allocated = stack_pointer_delta - pending_stack_adjust; /* The argument block when performing a sibling call is the - incoming argument block. */ + incoming argument block. */ if (pass == 0) - { - argblock = crtl->args.internal_arg_pointer; - argblock + { + argblock = crtl->args.internal_arg_pointer; + argblock #ifdef STACK_GROWS_DOWNWARD - = plus_constant (argblock, crtl->args.pretend_args_size); + = plus_constant (argblock, crtl->args.pretend_args_size); #else - = plus_constant (argblock, -crtl->args.pretend_args_size); + = plus_constant (argblock, -crtl->args.pretend_args_size); #endif - stored_args_map = sbitmap_alloc (args_size.constant); - sbitmap_zero (stored_args_map); - } + stored_args_map = sbitmap_alloc (args_size.constant); + sbitmap_zero (stored_args_map); + } /* If we have no actual push instructions, or shouldn't use them, - make space for all args right now. */ + make space for all args right now. */ else if (adjusted_args_size.var != 0) { if (old_stack_level == 0) @@ -2591,130 +2503,130 @@ current_function_has_unbounded_dynamic_stack_size = 1; } else - { - /* Note that we must go through the motions of allocating an argument - block even if the size is zero because we may be storing args - in the area reserved for register arguments, which may be part of - the stack frame. */ - - int needed = adjusted_args_size.constant; - - /* Store the maximum argument space used. It will be pushed by - the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow - checking). */ - - if (needed > crtl->outgoing_args_size) - crtl->outgoing_args_size = needed; - - if (must_preallocate) - { - if (ACCUMULATE_OUTGOING_ARGS) - { - /* Since the stack pointer will never be pushed, it is - possible for the evaluation of a parm to clobber - something we have already written to the stack. - Since most function calls on RISC machines do not use - the stack, this is uncommon, but must work correctly. - - Therefore, we save any area of the stack that was already - written and that we are using. Here we set up to do this - by making a new stack usage map from the old one. The - actual save will be done by store_one_arg. - - Another approach might be to try to reorder the argument - evaluations to avoid this conflicting stack usage. */ - - /* Since we will be writing into the entire argument area, - the map must be allocated for its entire size, not just - the part that is the responsibility of the caller. */ - if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? 
fntype : TREE_TYPE (fndecl)))) - needed += reg_parm_stack_space; + { + /* Note that we must go through the motions of allocating an argument + block even if the size is zero because we may be storing args + in the area reserved for register arguments, which may be part of + the stack frame. */ + + int needed = adjusted_args_size.constant; + + /* Store the maximum argument space used. It will be pushed by + the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow + checking). */ + + if (needed > crtl->outgoing_args_size) + crtl->outgoing_args_size = needed; + + if (must_preallocate) + { + if (ACCUMULATE_OUTGOING_ARGS) + { + /* Since the stack pointer will never be pushed, it is + possible for the evaluation of a parm to clobber + something we have already written to the stack. + Since most function calls on RISC machines do not use + the stack, this is uncommon, but must work correctly. + + Therefore, we save any area of the stack that was already + written and that we are using. Here we set up to do this + by making a new stack usage map from the old one. The + actual save will be done by store_one_arg. + + Another approach might be to try to reorder the argument + evaluations to avoid this conflicting stack usage. */ + + /* Since we will be writing into the entire argument area, + the map must be allocated for its entire size, not just + the part that is the responsibility of the caller. */ + if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD - highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed + 1); + highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, + needed + 1); #else - highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed); + highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, + needed); #endif - if (stack_usage_map_buf) - free (stack_usage_map_buf); - stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); - stack_usage_map = stack_usage_map_buf; - - if (initial_highest_arg_in_use) - memcpy (stack_usage_map, initial_stack_usage_map, - initial_highest_arg_in_use); - - if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) - memset (&stack_usage_map[initial_highest_arg_in_use], 0, - (highest_outgoing_arg_in_use - - initial_highest_arg_in_use)); - needed = 0; - - /* The address of the outgoing argument list must not be - copied to a register here, because argblock would be left - pointing to the wrong place after the call to - allocate_dynamic_stack_space below. */ - - argblock = virtual_outgoing_args_rtx; - } - else - { - if (inhibit_defer_pop == 0) - { - /* Try to reuse some or all of the pending_stack_adjust - to get this space. */ - needed - = (combine_pending_stack_adjustment_and_call - (unadjusted_args_size, - &adjusted_args_size, - preferred_unit_stack_boundary)); - - /* combine_pending_stack_adjustment_and_call computes - an adjustment before the arguments are allocated. - Account for them and see whether or not the stack - needs to go up or down. */ - needed = unadjusted_args_size - needed; - - if (needed < 0) - { - /* We're releasing stack space. */ - /* ??? We can avoid any adjustment at all if we're - already aligned. FIXME. */ - pending_stack_adjust = -needed; - do_pending_stack_adjust (); - needed = 0; - } - else - /* We need to allocate space. We'll do that in - push_block below. 
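The map rebuilt above is the ACCUMULATE_OUTGOING_ARGS bookkeeping: one byte per byte of the outgoing-argument area, nonzero once that byte has been written, so a later store can tell whether it would clobber an argument already in place. A self-contained C sketch of the idea; the names and the fixed area size are illustrative, not the GCC variables:

#include <string.h>

#define ARG_AREA 64

static char usage_map[ARG_AREA];   /* nonzero = byte already written */

static void
mark_used (int lo, int hi)
{
  memset (usage_map + lo, 1, hi - lo);
}

/* Return nonzero if [lo, hi) overlaps bytes already in use,
   i.e. a save area would be needed before storing there.  */
static int
needs_save (int lo, int hi)
{
  int i;
  for (i = lo; i < hi; i++)
    if (usage_map[i])
      return 1;
  return 0;
}

int
main (void)
{
  mark_used (8, 16);
  return needs_save (12, 20);   /* 1: overlap, must save first */
}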
*/ - pending_stack_adjust = 0; - } - - /* Special case this because overhead of `push_block' in - this case is non-trivial. */ - if (needed == 0) - argblock = virtual_outgoing_args_rtx; - else - { - argblock = push_block (GEN_INT (needed), 0, 0); + if (stack_usage_map_buf) + free (stack_usage_map_buf); + stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); + stack_usage_map = stack_usage_map_buf; + + if (initial_highest_arg_in_use) + memcpy (stack_usage_map, initial_stack_usage_map, + initial_highest_arg_in_use); + + if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) + memset (&stack_usage_map[initial_highest_arg_in_use], 0, + (highest_outgoing_arg_in_use + - initial_highest_arg_in_use)); + needed = 0; + + /* The address of the outgoing argument list must not be + copied to a register here, because argblock would be left + pointing to the wrong place after the call to + allocate_dynamic_stack_space below. */ + + argblock = virtual_outgoing_args_rtx; + } + else + { + if (inhibit_defer_pop == 0) + { + /* Try to reuse some or all of the pending_stack_adjust + to get this space. */ + needed + = (combine_pending_stack_adjustment_and_call + (unadjusted_args_size, + &adjusted_args_size, + preferred_unit_stack_boundary)); + + /* combine_pending_stack_adjustment_and_call computes + an adjustment before the arguments are allocated. + Account for them and see whether or not the stack + needs to go up or down. */ + needed = unadjusted_args_size - needed; + + if (needed < 0) + { + /* We're releasing stack space. */ + /* ??? We can avoid any adjustment at all if we're + already aligned. FIXME. */ + pending_stack_adjust = -needed; + do_pending_stack_adjust (); + needed = 0; + } + else + /* We need to allocate space. We'll do that in + push_block below. */ + pending_stack_adjust = 0; + } + + /* Special case this because overhead of `push_block' in + this case is non-trivial. */ + if (needed == 0) + argblock = virtual_outgoing_args_rtx; + else + { + argblock = push_block (GEN_INT (needed), 0, 0); #ifdef ARGS_GROW_DOWNWARD - argblock = plus_constant (argblock, needed); + argblock = plus_constant (argblock, needed); #endif - } - - /* We only really need to call `copy_to_reg' in the case - where push insns are going to be used to pass ARGBLOCK - to a function call in ARGS. In that case, the stack - pointer changes value from the allocation point to the - call point, and hence the value of - VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might - as well always do it. */ - argblock = copy_to_reg (argblock); - } - } - } + } + + /* We only really need to call `copy_to_reg' in the case + where push insns are going to be used to pass ARGBLOCK + to a function call in ARGS. In that case, the stack + pointer changes value from the allocation point to the + call point, and hence the value of + VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might + as well always do it. */ + argblock = copy_to_reg (argblock); + } + } + } if (ACCUMULATE_OUTGOING_ARGS) { @@ -2769,28 +2681,28 @@ compute_argument_addresses (args, argblock, num_actuals); /* If we push args individually in reverse order, perform stack alignment - before the first push (the last arg). */ + before the first push (the last arg). */ if (PUSH_ARGS_REVERSED && argblock == 0 - && adjusted_args_size.constant != unadjusted_args_size) - { - /* When the stack adjustment is pending, we get better code - by combining the adjustments. */ - if (pending_stack_adjust - && ! 
inhibit_defer_pop) - { - pending_stack_adjust - = (combine_pending_stack_adjustment_and_call - (unadjusted_args_size, - &adjusted_args_size, - preferred_unit_stack_boundary)); - do_pending_stack_adjust (); - } - else if (argblock == 0) - anti_adjust_stack (GEN_INT (adjusted_args_size.constant - - unadjusted_args_size)); - } + && adjusted_args_size.constant != unadjusted_args_size) + { + /* When the stack adjustment is pending, we get better code + by combining the adjustments. */ + if (pending_stack_adjust + && ! inhibit_defer_pop) + { + pending_stack_adjust + = (combine_pending_stack_adjustment_and_call + (unadjusted_args_size, + &adjusted_args_size, + preferred_unit_stack_boundary)); + do_pending_stack_adjust (); + } + else if (argblock == 0) + anti_adjust_stack (GEN_INT (adjusted_args_size.constant + - unadjusted_args_size)); + } /* Now that the stack is properly aligned, pops can't safely - be deferred during the evaluation of the arguments. */ + be deferred during the evaluation of the arguments. */ NO_DEFER_POP; /* Record the maximum pushed stack space size. We need to delay @@ -2811,138 +2723,138 @@ /* Figure out the register where the value, if any, will come back. */ valreg = 0; if (TYPE_MODE (rettype) != VOIDmode - && ! structure_value_addr) - { - if (pcc_struct_value) - valreg = hard_function_value (build_pointer_type (rettype), - fndecl, NULL, (pass == 0)); - else - valreg = hard_function_value (rettype, fndecl, fntype, - (pass == 0)); - - /* If VALREG is a PARALLEL whose first member has a zero - offset, use that. This is for targets such as m68k that - return the same value in multiple places. */ - if (GET_CODE (valreg) == PARALLEL) - { - rtx elem = XVECEXP (valreg, 0, 0); - rtx where = XEXP (elem, 0); - rtx offset = XEXP (elem, 1); - if (offset == const0_rtx - && GET_MODE (where) == GET_MODE (valreg)) - valreg = where; - } - } + && ! structure_value_addr) + { + if (pcc_struct_value) + valreg = hard_function_value (build_pointer_type (rettype), + fndecl, NULL, (pass == 0)); + else + valreg = hard_function_value (rettype, fndecl, fntype, + (pass == 0)); + + /* If VALREG is a PARALLEL whose first member has a zero + offset, use that. This is for targets such as m68k that + return the same value in multiple places. */ + if (GET_CODE (valreg) == PARALLEL) + { + rtx elem = XVECEXP (valreg, 0, 0); + rtx where = XEXP (elem, 0); + rtx offset = XEXP (elem, 1); + if (offset == const0_rtx + && GET_MODE (where) == GET_MODE (valreg)) + valreg = where; + } + } /* Precompute all register parameters. It isn't safe to compute anything - once we have started filling any specific hard regs. */ + once we have started filling any specific hard regs. */ precompute_register_parameters (num_actuals, args, ®_parm_seen); if (CALL_EXPR_STATIC_CHAIN (exp)) - static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); + static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); else - static_chain_value = 0; + static_chain_value = 0; #ifdef REG_PARM_STACK_SPACE /* Save the fixed argument area if it's part of the caller's frame and - is clobbered by argument setup for this call. */ + is clobbered by argument setup for this call. */ if (ACCUMULATE_OUTGOING_ARGS && pass) - save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, - &low_to_save, &high_to_save); + save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, + &low_to_save, &high_to_save); #endif /* Now store (and compute if necessary) all non-register parms. 
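The needed = unadjusted_args_size - needed computation in the hunk above folds a pending pop into the upcoming argument-block allocation: if the deferred adjustment already releases more stack than the arguments need, the remainder is released now and nothing new is allocated. As plain arithmetic, a hedged sketch with made-up byte counts:

#include <stdio.h>

int
main (void)
{
  int pending_pop = 24;  /* bytes a previous call was about to release */
  int args_needed = 16;  /* bytes the new argument block requires */
  int net = args_needed - pending_pop;

  if (net < 0)
    printf ("release %d bytes now; the args reuse the rest\n", -net);
  else
    printf ("allocate %d additional bytes\n", net);
  return 0;
}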
- These come before register parms, since they can require block-moves, - which could clobber the registers used for register parms. - Parms which have partial registers are not stored here, - but we do preallocate space here if they want that. */ + These come before register parms, since they can require block-moves, + which could clobber the registers used for register parms. + Parms which have partial registers are not stored here, + but we do preallocate space here if they want that. */ for (i = 0; i < num_actuals; i++) - { - if (args[i].reg == 0 || args[i].pass_on_stack) - { - rtx before_arg = get_last_insn (); - - if (store_one_arg (&args[i], argblock, flags, - adjusted_args_size.var != 0, - reg_parm_stack_space) - || (pass == 0 - && check_sibcall_argument_overlap (before_arg, - &args[i], 1))) - sibcall_failure = 1; - } - - if (((flags & ECF_CONST) - || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) - && args[i].stack) - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_USE (VOIDmode, - args[i].stack), - call_fusage); - } + { + if (args[i].reg == 0 || args[i].pass_on_stack) + { + rtx before_arg = get_last_insn (); + + if (store_one_arg (&args[i], argblock, flags, + adjusted_args_size.var != 0, + reg_parm_stack_space) + || (pass == 0 + && check_sibcall_argument_overlap (before_arg, + &args[i], 1))) + sibcall_failure = 1; + } + + if (((flags & ECF_CONST) + || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) + && args[i].stack) + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_USE (VOIDmode, + args[i].stack), + call_fusage); + } /* If we have a parm that is passed in registers but not in memory - and whose alignment does not permit a direct copy into registers, - make a group of pseudos that correspond to each register that we - will later fill. */ + and whose alignment does not permit a direct copy into registers, + make a group of pseudos that correspond to each register that we + will later fill. */ if (STRICT_ALIGNMENT) - store_unaligned_arguments_into_pseudos (args, num_actuals); + store_unaligned_arguments_into_pseudos (args, num_actuals); /* Now store any partially-in-registers parm. - This is the last place a block-move can happen. */ + This is the last place a block-move can happen. */ if (reg_parm_seen) - for (i = 0; i < num_actuals; i++) - if (args[i].partial != 0 && ! args[i].pass_on_stack) - { - rtx before_arg = get_last_insn (); - - if (store_one_arg (&args[i], argblock, flags, - adjusted_args_size.var != 0, - reg_parm_stack_space) - || (pass == 0 - && check_sibcall_argument_overlap (before_arg, - &args[i], 1))) - sibcall_failure = 1; - } + for (i = 0; i < num_actuals; i++) + if (args[i].partial != 0 && ! args[i].pass_on_stack) + { + rtx before_arg = get_last_insn (); + + if (store_one_arg (&args[i], argblock, flags, + adjusted_args_size.var != 0, + reg_parm_stack_space) + || (pass == 0 + && check_sibcall_argument_overlap (before_arg, + &args[i], 1))) + sibcall_failure = 1; + } /* If we pushed args in forward order, perform stack alignment - after pushing the last arg. */ + after pushing the last arg. */ if (!PUSH_ARGS_REVERSED && argblock == 0) - anti_adjust_stack (GEN_INT (adjusted_args_size.constant - - unadjusted_args_size)); + anti_adjust_stack (GEN_INT (adjusted_args_size.constant + - unadjusted_args_size)); /* If register arguments require space on the stack and stack space - was not preallocated, allocate stack space here for arguments - passed in registers. */ + was not preallocated, allocate stack space here for arguments + passed in registers. 
*/ if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) && !ACCUMULATE_OUTGOING_ARGS - && must_preallocate == 0 && reg_parm_stack_space > 0) - anti_adjust_stack (GEN_INT (reg_parm_stack_space)); + && must_preallocate == 0 && reg_parm_stack_space > 0) + anti_adjust_stack (GEN_INT (reg_parm_stack_space)); /* Pass the function the address in which to return a - structure value. */ + structure value. */ if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) - { - structure_value_addr - = convert_memory_address (Pmode, structure_value_addr); - emit_move_insn (struct_value, - force_reg (Pmode, - force_operand (structure_value_addr, - NULL_RTX))); - - if (REG_P (struct_value)) - use_reg (&call_fusage, struct_value); - } + { + structure_value_addr + = convert_memory_address (Pmode, structure_value_addr); + emit_move_insn (struct_value, + force_reg (Pmode, + force_operand (structure_value_addr, + NULL_RTX))); + + if (REG_P (struct_value)) + use_reg (&call_fusage, struct_value); + } after_args = get_last_insn (); funexp = prepare_call_address (fndecl, funexp, static_chain_value, - &call_fusage, reg_parm_seen, pass == 0); + &call_fusage, reg_parm_seen, pass == 0); load_register_parameters (args, num_actuals, &call_fusage, flags, - pass == 0, &sibcall_failure); + pass == 0, &sibcall_failure); /* Save a pointer to the last insn before the call, so that we can - later safely search backwards to find the CALL_INSN. */ + later safely search backwards to find the CALL_INSN. */ before_call = get_last_insn (); /* Set up next argument register. For sibling calls on machines @@ -2958,233 +2870,233 @@ true); /* All arguments and registers used for the call must be set up by - now! */ + now! */ /* Stack must be properly aligned now. */ gcc_assert (!pass - || !(stack_pointer_delta % preferred_unit_stack_boundary)); + || !(stack_pointer_delta % preferred_unit_stack_boundary)); /* Generate the actual call instruction. */ emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, - adjusted_args_size.constant, struct_value_size, - next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, - flags, & args_so_far); + adjusted_args_size.constant, struct_value_size, + next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, + flags, & args_so_far); /* If the call setup or the call itself overlaps with anything - of the argument setup we probably clobbered our call address. - In that case we can't do sibcalls. */ + of the argument setup we probably clobbered our call address. + In that case we can't do sibcalls. */ if (pass == 0 - && check_sibcall_argument_overlap (after_args, 0, 0)) - sibcall_failure = 1; + && check_sibcall_argument_overlap (after_args, 0, 0)) + sibcall_failure = 1; /* If a non-BLKmode value is returned at the most significant end - of a register, shift the register right by the appropriate amount - and update VALREG accordingly. BLKmode values are handled by the - group load/store machinery below. */ + of a register, shift the register right by the appropriate amount + and update VALREG accordingly. BLKmode values are handled by the + group load/store machinery below. 
*/ if (!structure_value_addr - && !pcc_struct_value - && TYPE_MODE (rettype) != BLKmode - && targetm.calls.return_in_msb (rettype)) - { - if (shift_return_value (TYPE_MODE (rettype), false, valreg)) - sibcall_failure = 1; - valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); - } + && !pcc_struct_value + && TYPE_MODE (rettype) != BLKmode + && targetm.calls.return_in_msb (rettype)) + { + if (shift_return_value (TYPE_MODE (rettype), false, valreg)) + sibcall_failure = 1; + valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); + } if (pass && (flags & ECF_MALLOC)) - { - rtx temp = gen_reg_rtx (GET_MODE (valreg)); - rtx last, insns; - - /* The return value from a malloc-like function is a pointer. */ - if (TREE_CODE (rettype) == POINTER_TYPE) - mark_reg_pointer (temp, BIGGEST_ALIGNMENT); - - emit_move_insn (temp, valreg); - - /* The return value from a malloc-like function can not alias - anything else. */ - last = get_last_insn (); - add_reg_note (last, REG_NOALIAS, temp); - - /* Write out the sequence. */ - insns = get_insns (); - end_sequence (); - emit_insn (insns); - valreg = temp; - } + { + rtx temp = gen_reg_rtx (GET_MODE (valreg)); + rtx last, insns; + + /* The return value from a malloc-like function is a pointer. */ + if (TREE_CODE (rettype) == POINTER_TYPE) + mark_reg_pointer (temp, BIGGEST_ALIGNMENT); + + emit_move_insn (temp, valreg); + + /* The return value from a malloc-like function can not alias + anything else. */ + last = get_last_insn (); + add_reg_note (last, REG_NOALIAS, temp); + + /* Write out the sequence. */ + insns = get_insns (); + end_sequence (); + emit_insn (insns); + valreg = temp; + } /* For calls to `setjmp', etc., inform - function.c:setjmp_warnings that it should complain if - nonvolatile values are live. For functions that cannot - return, inform flow that control does not fall through. */ + function.c:setjmp_warnings that it should complain if + nonvolatile values are live. For functions that cannot + return, inform flow that control does not fall through. */ if ((flags & ECF_NORETURN) || pass == 0) - { - /* The barrier must be emitted - immediately after the CALL_INSN. Some ports emit more - than just a CALL_INSN above, so we must search for it here. */ - - rtx last = get_last_insn (); - while (!CALL_P (last)) - { - last = PREV_INSN (last); - /* There was no CALL_INSN? */ - gcc_assert (last != before_call); - } - - emit_barrier_after (last); - - /* Stack adjustments after a noreturn call are dead code. - However when NO_DEFER_POP is in effect, we must preserve - stack_pointer_delta. */ - if (inhibit_defer_pop == 0) - { - stack_pointer_delta = old_stack_allocated; - pending_stack_adjust = 0; - } - } + { + /* The barrier must be emitted + immediately after the CALL_INSN. Some ports emit more + than just a CALL_INSN above, so we must search for it here. */ + + rtx last = get_last_insn (); + while (!CALL_P (last)) + { + last = PREV_INSN (last); + /* There was no CALL_INSN? */ + gcc_assert (last != before_call); + } + + emit_barrier_after (last); + + /* Stack adjustments after a noreturn call are dead code. + However when NO_DEFER_POP is in effect, we must preserve + stack_pointer_delta. */ + if (inhibit_defer_pop == 0) + { + stack_pointer_delta = old_stack_allocated; + pending_stack_adjust = 0; + } + } /* If value type not void, return an rtx for the value. 
*/ if (TYPE_MODE (rettype) == VOIDmode - || ignore) - target = const0_rtx; + || ignore) + target = const0_rtx; else if (structure_value_addr) - { - if (target == 0 || !MEM_P (target)) - { - target - = gen_rtx_MEM (TYPE_MODE (rettype), - memory_address (TYPE_MODE (rettype), - structure_value_addr)); - set_mem_attributes (target, rettype, 1); - } - } + { + if (target == 0 || !MEM_P (target)) + { + target + = gen_rtx_MEM (TYPE_MODE (rettype), + memory_address (TYPE_MODE (rettype), + structure_value_addr)); + set_mem_attributes (target, rettype, 1); + } + } else if (pcc_struct_value) - { - /* This is the special C++ case where we need to - know what the true target was. We take care to - never use this value more than once in one expression. */ - target = gen_rtx_MEM (TYPE_MODE (rettype), - copy_to_reg (valreg)); - set_mem_attributes (target, rettype, 1); - } + { + /* This is the special C++ case where we need to + know what the true target was. We take care to + never use this value more than once in one expression. */ + target = gen_rtx_MEM (TYPE_MODE (rettype), + copy_to_reg (valreg)); + set_mem_attributes (target, rettype, 1); + } /* Handle calls that return values in multiple non-contiguous locations. - The Irix 6 ABI has examples of this. */ + The Irix 6 ABI has examples of this. */ else if (GET_CODE (valreg) == PARALLEL) - { - if (target == 0) - { - /* This will only be assigned once, so it can be readonly. */ - tree nt = build_qualified_type (rettype, - (TYPE_QUALS (rettype) - | TYPE_QUAL_CONST)); - - target = assign_temp (nt, 0, 1, 1); - } - - if (! rtx_equal_p (target, valreg)) - emit_group_store (target, valreg, rettype, - int_size_in_bytes (rettype)); - - /* We can not support sibling calls for this case. */ - sibcall_failure = 1; - } + { + if (target == 0) + { + /* This will only be assigned once, so it can be readonly. */ + tree nt = build_qualified_type (rettype, + (TYPE_QUALS (rettype) + | TYPE_QUAL_CONST)); + + target = assign_temp (nt, 0, 1, 1); + } + + if (! rtx_equal_p (target, valreg)) + emit_group_store (target, valreg, rettype, + int_size_in_bytes (rettype)); + + /* We can not support sibling calls for this case. */ + sibcall_failure = 1; + } else if (target - && GET_MODE (target) == TYPE_MODE (rettype) - && GET_MODE (target) == GET_MODE (valreg)) - { - bool may_overlap = false; - - /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard - reg to a plain register. */ - if (!REG_P (target) || HARD_REGISTER_P (target)) - valreg = avoid_likely_spilled_reg (valreg); - - /* If TARGET is a MEM in the argument area, and we have - saved part of the argument area, then we can't store - directly into TARGET as it may get overwritten when we - restore the argument save area below. Don't work too - hard though and simply force TARGET to a register if it - is a MEM; the optimizer is quite likely to sort it out. */ - if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) - for (i = 0; i < num_actuals; i++) - if (args[i].save_area) - { - may_overlap = true; - break; - } - - if (may_overlap) - target = copy_to_reg (valreg); - else - { - /* TARGET and VALREG cannot be equal at this point - because the latter would not have - REG_FUNCTION_VALUE_P true, while the former would if - it were referring to the same register. - - If they refer to the same register, this move will be - a no-op, except when function inlining is being - done. */ - emit_move_insn (target, valreg); - - /* If we are setting a MEM, this code must be executed. 
- Since it is emitted after the call insn, sibcall - optimization cannot be performed in that case. */ - if (MEM_P (target)) - sibcall_failure = 1; - } - } + && GET_MODE (target) == TYPE_MODE (rettype) + && GET_MODE (target) == GET_MODE (valreg)) + { + bool may_overlap = false; + + /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard + reg to a plain register. */ + if (!REG_P (target) || HARD_REGISTER_P (target)) + valreg = avoid_likely_spilled_reg (valreg); + + /* If TARGET is a MEM in the argument area, and we have + saved part of the argument area, then we can't store + directly into TARGET as it may get overwritten when we + restore the argument save area below. Don't work too + hard though and simply force TARGET to a register if it + is a MEM; the optimizer is quite likely to sort it out. */ + if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) + for (i = 0; i < num_actuals; i++) + if (args[i].save_area) + { + may_overlap = true; + break; + } + + if (may_overlap) + target = copy_to_reg (valreg); + else + { + /* TARGET and VALREG cannot be equal at this point + because the latter would not have + REG_FUNCTION_VALUE_P true, while the former would if + it were referring to the same register. + + If they refer to the same register, this move will be + a no-op, except when function inlining is being + done. */ + emit_move_insn (target, valreg); + + /* If we are setting a MEM, this code must be executed. + Since it is emitted after the call insn, sibcall + optimization cannot be performed in that case. */ + if (MEM_P (target)) + sibcall_failure = 1; + } + } else if (TYPE_MODE (rettype) == BLKmode) - { - rtx val = valreg; - if (GET_MODE (val) != BLKmode) - val = avoid_likely_spilled_reg (val); - target = copy_blkmode_from_reg (target, val, rettype); - - /* We can not support sibling calls for this case. */ - sibcall_failure = 1; - } + { + rtx val = valreg; + if (GET_MODE (val) != BLKmode) + val = avoid_likely_spilled_reg (val); + target = copy_blkmode_from_reg (target, val, rettype); + + /* We can not support sibling calls for this case. */ + sibcall_failure = 1; + } else - target = copy_to_reg (avoid_likely_spilled_reg (valreg)); + target = copy_to_reg (avoid_likely_spilled_reg (valreg)); /* If we promoted this return value, make the proper SUBREG. TARGET might be const0_rtx here, so be careful. */ if (REG_P (target) - && TYPE_MODE (rettype) != BLKmode - && GET_MODE (target) != TYPE_MODE (rettype)) - { - tree type = rettype; - int unsignedp = TYPE_UNSIGNED (type); - int offset = 0; - enum machine_mode pmode; - - /* Ensure we promote as expected, and get the new unsignedness. */ - pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, - funtype, 1); - gcc_assert (GET_MODE (target) == pmode); - - if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) - && (GET_MODE_SIZE (GET_MODE (target)) - > GET_MODE_SIZE (TYPE_MODE (type)))) - { - offset = GET_MODE_SIZE (GET_MODE (target)) - - GET_MODE_SIZE (TYPE_MODE (type)); - if (! BYTES_BIG_ENDIAN) - offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; - else if (! WORDS_BIG_ENDIAN) - offset %= UNITS_PER_WORD; - } - - target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); - SUBREG_PROMOTED_VAR_P (target) = 1; - SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); - } + && TYPE_MODE (rettype) != BLKmode + && GET_MODE (target) != TYPE_MODE (rettype)) + { + tree type = rettype; + int unsignedp = TYPE_UNSIGNED (type); + int offset = 0; + enum machine_mode pmode; + + /* Ensure we promote as expected, and get the new unsignedness. 
*/ + pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, + funtype, 1); + gcc_assert (GET_MODE (target) == pmode); + + if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) + && (GET_MODE_SIZE (GET_MODE (target)) + > GET_MODE_SIZE (TYPE_MODE (type)))) + { + offset = GET_MODE_SIZE (GET_MODE (target)) + - GET_MODE_SIZE (TYPE_MODE (type)); + if (! BYTES_BIG_ENDIAN) + offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; + else if (! WORDS_BIG_ENDIAN) + offset %= UNITS_PER_WORD; + } + + target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); + SUBREG_PROMOTED_VAR_P (target) = 1; + SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); + } /* If size of args is variable or this was a constructor call for a stack - argument, restore saved stack-pointer value. */ + argument, restore saved stack-pointer value. */ if (old_stack_level) { @@ -3198,86 +3110,86 @@ sibcall_failure = 1; } else if (ACCUMULATE_OUTGOING_ARGS && pass) - { + { #ifdef REG_PARM_STACK_SPACE - if (save_area) - restore_fixed_argument_area (save_area, argblock, - high_to_save, low_to_save); + if (save_area) + restore_fixed_argument_area (save_area, argblock, + high_to_save, low_to_save); #endif - /* If we saved any argument areas, restore them. */ - for (i = 0; i < num_actuals; i++) - if (args[i].save_area) - { - enum machine_mode save_mode = GET_MODE (args[i].save_area); - rtx stack_area - = gen_rtx_MEM (save_mode, - memory_address (save_mode, - XEXP (args[i].stack_slot, 0))); - - if (save_mode != BLKmode) - emit_move_insn (stack_area, args[i].save_area); - else - emit_block_move (stack_area, args[i].save_area, - GEN_INT (args[i].locate.size.constant), - BLOCK_OP_CALL_PARM); - } - - highest_outgoing_arg_in_use = initial_highest_arg_in_use; - stack_usage_map = initial_stack_usage_map; - } + /* If we saved any argument areas, restore them. */ + for (i = 0; i < num_actuals; i++) + if (args[i].save_area) + { + enum machine_mode save_mode = GET_MODE (args[i].save_area); + rtx stack_area + = gen_rtx_MEM (save_mode, + memory_address (save_mode, + XEXP (args[i].stack_slot, 0))); + + if (save_mode != BLKmode) + emit_move_insn (stack_area, args[i].save_area); + else + emit_block_move (stack_area, args[i].save_area, + GEN_INT (args[i].locate.size.constant), + BLOCK_OP_CALL_PARM); + } + + highest_outgoing_arg_in_use = initial_highest_arg_in_use; + stack_usage_map = initial_stack_usage_map; + } /* If this was alloca, record the new stack level for nonlocal gotos. - Check for the handler slots since we might not have a save area - for non-local gotos. */ + Check for the handler slots since we might not have a save area + for non-local gotos. */ if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0) - update_nonlocal_goto_save_area (); + update_nonlocal_goto_save_area (); /* Free up storage we no longer need. */ for (i = 0; i < num_actuals; ++i) - if (args[i].aligned_regs) - free (args[i].aligned_regs); + if (args[i].aligned_regs) + free (args[i].aligned_regs); insns = get_insns (); end_sequence (); if (pass == 0) - { - tail_call_insns = insns; - - /* Restore the pending stack adjustment now that we have - finished generating the sibling call sequence. */ - - pending_stack_adjust = save_pending_stack_adjust; - stack_pointer_delta = save_stack_pointer_delta; - - /* Prepare arg structure for next iteration. 
*/ - for (i = 0; i < num_actuals; i++) - { - args[i].value = 0; - args[i].aligned_regs = 0; - args[i].stack = 0; - } - - sbitmap_free (stored_args_map); - } + { + tail_call_insns = insns; + + /* Restore the pending stack adjustment now that we have + finished generating the sibling call sequence. */ + + pending_stack_adjust = save_pending_stack_adjust; + stack_pointer_delta = save_stack_pointer_delta; + + /* Prepare arg structure for next iteration. */ + for (i = 0; i < num_actuals; i++) + { + args[i].value = 0; + args[i].aligned_regs = 0; + args[i].stack = 0; + } + + sbitmap_free (stored_args_map); + } else - { - normal_call_insns = insns; - - /* Verify that we've deallocated all the stack we used. */ - gcc_assert ((flags & ECF_NORETURN) - || (old_stack_allocated - == stack_pointer_delta - pending_stack_adjust)); - } + { + normal_call_insns = insns; + + /* Verify that we've deallocated all the stack we used. */ + gcc_assert ((flags & ECF_NORETURN) + || (old_stack_allocated + == stack_pointer_delta - pending_stack_adjust)); + } /* If something prevents making this a sibling call, - zero out the sequence. */ + zero out the sequence. */ if (sibcall_failure) - tail_call_insns = NULL_RTX; + tail_call_insns = NULL_RTX; else - break; + break; } /* If tail call production succeeded, we need to remove REG_EQUIV notes on @@ -3320,14 +3232,14 @@ rtx note; /* There are never REG_EQUIV notes for the incoming arguments - after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ + after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ if (NOTE_P (insn) - && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) - break; + && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) + break; note = find_reg_note (insn, REG_EQUIV, 0); if (note) - remove_note (insn, note); + remove_note (insn, note); note = find_reg_note (insn, REG_EQUIV, 0); gcc_assert (!note); } @@ -3345,8 +3257,8 @@ { tree type = TREE_VALUE (p); if (TREE_CODE (type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (type)) - goto found; + && targetm.calls.split_complex_arg (type)) + goto found; } return types; @@ -3358,22 +3270,22 @@ tree complex_type = TREE_VALUE (p); if (TREE_CODE (complex_type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (complex_type)) - { - tree next, imag; - - /* Rewrite complex type with component type. */ - TREE_VALUE (p) = TREE_TYPE (complex_type); - next = TREE_CHAIN (p); - - /* Add another component type for the imaginary part. */ - imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); - TREE_CHAIN (p) = imag; - TREE_CHAIN (imag) = next; - - /* Skip the newly created node. */ - p = TREE_CHAIN (p); - } + && targetm.calls.split_complex_arg (complex_type)) + { + tree next, imag; + + /* Rewrite complex type with component type. */ + TREE_VALUE (p) = TREE_TYPE (complex_type); + next = TREE_CHAIN (p); + + /* Add another component type for the imaginary part. */ + imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); + TREE_CHAIN (p) = imag; + TREE_CHAIN (imag) = next; + + /* Skip the newly created node. */ + p = TREE_CHAIN (p); + } } return types; @@ -3385,8 +3297,8 @@ static rtx emit_library_call_value_1 (int retval, rtx orgfun, rtx value, - enum libcall_type fn_type, - enum machine_mode outmode, int nargs, va_list p) + enum libcall_type fn_type, + enum machine_mode outmode, int nargs, va_list p) { /* Total size in bytes of all the stack-parms scanned so far. 
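split_complex_types above rewrites the TYPE_ARG_TYPES chain in place: the node for the complex type keeps only its component type, a fresh node for the imaginary part is spliced in directly behind it, and the walk then skips the new node. The same splice on an ordinary singly linked list, as an illustrative sketch:

#include <stdlib.h>

struct node { int value; struct node *next; };

/* Replace P's value with its "real" component and insert a new
   node for the "imaginary" component right after it, mirroring
   how split_complex_types chains in the extra parameter type.  */
static struct node *
split_node (struct node *p, int re, int im)
{
  struct node *imag = malloc (sizeof *imag);
  imag->value = im;
  imag->next = p->next;
  p->value = re;
  p->next = imag;
  return imag;        /* caller resumes the walk after the new node */
}

int
main (void)
{
  struct node tail = { 99, 0 };
  struct node p = { 7, &tail };   /* pretend 7 stands for a complex type */
  split_node (&p, 3, 4);          /* list is now 3 -> 4 -> 99 */
  return p.next->value == 4 ? 0 : 1;
}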
*/ struct args_size args_size; @@ -3422,7 +3334,7 @@ int reg_parm_stack_space = 0; int needed; rtx before_call; - tree tfom; /* type_for_mode (outmode, 0) */ + tree tfom; /* type_for_mode (outmode, 0) */ #ifdef REG_PARM_STACK_SPACE /* Define the boundary of the register parm stack space that needs to be @@ -3478,24 +3390,24 @@ { tfom = lang_hooks.types.type_for_mode (outmode, 0); if (aggregate_value_p (tfom, 0)) - { + { #ifdef PCC_STATIC_STRUCT_RETURN - rtx pointer_reg - = hard_function_value (build_pointer_type (tfom), 0, 0, 0); - mem_value = gen_rtx_MEM (outmode, pointer_reg); - pcc_struct_value = 1; - if (value == 0) - value = gen_reg_rtx (outmode); + rtx pointer_reg + = hard_function_value (build_pointer_type (tfom), 0, 0, 0); + mem_value = gen_rtx_MEM (outmode, pointer_reg); + pcc_struct_value = 1; + if (value == 0) + value = gen_reg_rtx (outmode); #else /* not PCC_STATIC_STRUCT_RETURN */ - struct_value_size = GET_MODE_SIZE (outmode); - if (value != 0 && MEM_P (value)) - mem_value = value; - else - mem_value = assign_temp (tfom, 0, 1, 1); + struct_value_size = GET_MODE_SIZE (outmode); + if (value != 0 && MEM_P (value)) + mem_value = value; + else + mem_value = assign_temp (tfom, 0, 1, 1); #endif - /* This call returns a big structure. */ - flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); - } + /* This call returns a big structure. */ + flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); + } } else tfom = void_type_node; @@ -3535,8 +3447,8 @@ /* Make sure it is a reasonable operand for a move or push insn. */ if (!REG_P (addr) && !MEM_P (addr) - && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) - addr = force_operand (addr, NULL_RTX); + && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) + addr = force_operand (addr, NULL_RTX); argvec[count].value = addr; argvec[count].mode = Pmode; @@ -3545,19 +3457,19 @@ argvec[count].reg = targetm.calls.function_arg (&args_so_far, Pmode, NULL_TREE, true); gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode, - NULL_TREE, 1) == 0); + NULL_TREE, 1) == 0); locate_and_pad_parm (Pmode, NULL_TREE, #ifdef STACK_PARMS_IN_REG_PARM_AREA - 1, + 1, #else - argvec[count].reg != 0, + argvec[count].reg != 0, #endif - 0, NULL_TREE, &args_size, &argvec[count].locate); + 0, NULL_TREE, &args_size, &argvec[count].locate); if (argvec[count].reg == 0 || argvec[count].partial != 0 - || reg_parm_stack_space > 0) - args_size.constant += argvec[count].locate.size.constant; + || reg_parm_stack_space > 0) + args_size.constant += argvec[count].locate.size.constant; targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true); @@ -3570,50 +3482,50 @@ enum machine_mode mode = (enum machine_mode) va_arg (p, int); /* We cannot convert the arg value to the mode the library wants here; - must do it earlier where we know the signedness of the arg. */ + must do it earlier where we know the signedness of the arg. */ gcc_assert (mode != BLKmode - && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); + && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); /* Make sure it is a reasonable operand for a move or push insn. */ if (!REG_P (val) && !MEM_P (val) - && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) - val = force_operand (val, NULL_RTX); + && ! 
(CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) + val = force_operand (val, NULL_RTX); if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1)) - { - rtx slot; - int must_copy - = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1); - - /* If this was a CONST function, it is now PURE since it now - reads memory. */ - if (flags & ECF_CONST) - { - flags &= ~ECF_CONST; - flags |= ECF_PURE; - } - - if (MEM_P (val) && !must_copy) - slot = val; - else - { - slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), - 0, 1, 1); - emit_move_insn (slot, val); - } - - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_USE (VOIDmode, slot), - call_fusage); - if (must_copy) - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_CLOBBER (VOIDmode, - slot), - call_fusage); - - mode = Pmode; - val = force_operand (XEXP (slot, 0), NULL_RTX); - } + { + rtx slot; + int must_copy + = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1); + + /* If this was a CONST function, it is now PURE since it now + reads memory. */ + if (flags & ECF_CONST) + { + flags &= ~ECF_CONST; + flags |= ECF_PURE; + } + + if (MEM_P (val) && !must_copy) + slot = val; + else + { + slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), + 0, 1, 1); + emit_move_insn (slot, val); + } + + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_USE (VOIDmode, slot), + call_fusage); + if (must_copy) + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_CLOBBER (VOIDmode, + slot), + call_fusage); + + mode = Pmode; + val = force_operand (XEXP (slot, 0), NULL_RTX); + } argvec[count].value = val; argvec[count].mode = mode; @@ -3622,22 +3534,22 @@ NULL_TREE, true); argvec[count].partial - = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1); + = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1); locate_and_pad_parm (mode, NULL_TREE, #ifdef STACK_PARMS_IN_REG_PARM_AREA - 1, + 1, #else - argvec[count].reg != 0, + argvec[count].reg != 0, #endif - argvec[count].partial, - NULL_TREE, &args_size, &argvec[count].locate); + argvec[count].partial, + NULL_TREE, &args_size, &argvec[count].locate); gcc_assert (!argvec[count].locate.size.var); if (argvec[count].reg == 0 || argvec[count].partial != 0 - || reg_parm_stack_space > 0) - args_size.constant += argvec[count].locate.size.constant; + || reg_parm_stack_space > 0) + args_size.constant += argvec[count].locate.size.constant; targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true); } @@ -3648,14 +3560,14 @@ original_args_size = args_size; args_size.constant = (((args_size.constant - + stack_pointer_delta - + STACK_BYTES - 1) - / STACK_BYTES - * STACK_BYTES) - - stack_pointer_delta); + + stack_pointer_delta + + STACK_BYTES - 1) + / STACK_BYTES + * STACK_BYTES) + - stack_pointer_delta); args_size.constant = MAX (args_size.constant, - reg_parm_stack_space); + reg_parm_stack_space); if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) args_size.constant -= reg_parm_stack_space; @@ -3673,65 +3585,65 @@ if (ACCUMULATE_OUTGOING_ARGS) { /* Since the stack pointer will never be pushed, it is possible for - the evaluation of a parm to clobber something we have already - written to the stack. Since most function calls on RISC machines - do not use the stack, this is uncommon, but must work correctly. - - Therefore, we save any area of the stack that was already written - and that we are using. Here we set up to do this by making a new - stack usage map from the old one. 
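The rounding of args_size.constant near the top of this hunk keeps stack_pointer_delta plus the argument block a multiple of the stack boundary, so the stack stays aligned across the call. A small numeric sketch of that formula; the boundary and byte counts here are made up:

#include <stdio.h>

int
main (void)
{
  const int STACK_BYTES = 16;  /* preferred stack boundary in bytes */
  int delta = 4;               /* stack_pointer_delta already on the stack */
  int c = 20;                  /* raw args_size.constant */

  c = ((c + delta + STACK_BYTES - 1) / STACK_BYTES * STACK_BYTES) - delta;

  printf ("rounded constant = %d, delta + constant = %d\n", c, delta + c);
  /* prints 28 and 32: the combined total stays 16-byte aligned */
  return 0;
}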
- - Another approach might be to try to reorder the argument - evaluations to avoid this conflicting stack usage. */ + the evaluation of a parm to clobber something we have already + written to the stack. Since most function calls on RISC machines + do not use the stack, this is uncommon, but must work correctly. + + Therefore, we save any area of the stack that was already written + and that we are using. Here we set up to do this by making a new + stack usage map from the old one. + + Another approach might be to try to reorder the argument + evaluations to avoid this conflicting stack usage. */ needed = args_size.constant; /* Since we will be writing into the entire argument area, the - map must be allocated for its entire size, not just the part that - is the responsibility of the caller. */ + map must be allocated for its entire size, not just the part that + is the responsibility of the caller. */ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) - needed += reg_parm_stack_space; + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed + 1); + needed + 1); #else highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed); + needed); #endif stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); stack_usage_map = stack_usage_map_buf; if (initial_highest_arg_in_use) - memcpy (stack_usage_map, initial_stack_usage_map, - initial_highest_arg_in_use); + memcpy (stack_usage_map, initial_stack_usage_map, + initial_highest_arg_in_use); if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) - memset (&stack_usage_map[initial_highest_arg_in_use], 0, - highest_outgoing_arg_in_use - initial_highest_arg_in_use); + memset (&stack_usage_map[initial_highest_arg_in_use], 0, + highest_outgoing_arg_in_use - initial_highest_arg_in_use); needed = 0; /* We must be careful to use virtual regs before they're instantiated, - and real regs afterwards. Loop optimization, for example, can create - new libcalls after we've instantiated the virtual regs, and if we - use virtuals anyway, they won't match the rtl patterns. */ + and real regs afterwards. Loop optimization, for example, can create + new libcalls after we've instantiated the virtual regs, and if we + use virtuals anyway, they won't match the rtl patterns. */ if (virtuals_instantiated) - argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET); + argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET); else - argblock = virtual_outgoing_args_rtx; + argblock = virtual_outgoing_args_rtx; } else { if (!PUSH_ARGS) - argblock = push_block (GEN_INT (args_size.constant), 0, 0); + argblock = push_block (GEN_INT (args_size.constant), 0, 0); } /* If we push args individually in reverse order, perform stack alignment before the first push (the last arg). */ if (argblock == 0 && PUSH_ARGS_REVERSED) anti_adjust_stack (GEN_INT (args_size.constant - - original_args_size.constant)); + - original_args_size.constant)); if (PUSH_ARGS_REVERSED) { @@ -3748,10 +3660,10 @@ if (ACCUMULATE_OUTGOING_ARGS) { /* The argument list is the property of the called routine and it - may clobber it. If the fixed area has been used for previous - parameters, we must save and restore it. */ + may clobber it. If the fixed area has been used for previous + parameters, we must save and restore it. 
*/ save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, - &low_to_save, &high_to_save); + &low_to_save, &high_to_save); } #endif @@ -3769,107 +3681,107 @@ int lower_bound = 0, upper_bound = 0, i; if (! (reg != 0 && partial == 0)) - { - if (ACCUMULATE_OUTGOING_ARGS) - { - /* If this is being stored into a pre-allocated, fixed-size, - stack area, save any previous data at that location. */ + { + if (ACCUMULATE_OUTGOING_ARGS) + { + /* If this is being stored into a pre-allocated, fixed-size, + stack area, save any previous data at that location. */ #ifdef ARGS_GROW_DOWNWARD - /* stack_slot is negative, but we want to index stack_usage_map - with positive values. */ - upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; - lower_bound = upper_bound - argvec[argnum].locate.size.constant; + /* stack_slot is negative, but we want to index stack_usage_map + with positive values. */ + upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; + lower_bound = upper_bound - argvec[argnum].locate.size.constant; #else - lower_bound = argvec[argnum].locate.slot_offset.constant; - upper_bound = lower_bound + argvec[argnum].locate.size.constant; + lower_bound = argvec[argnum].locate.slot_offset.constant; + upper_bound = lower_bound + argvec[argnum].locate.size.constant; #endif - i = lower_bound; - /* Don't worry about things in the fixed argument area; - it has already been saved. */ - if (i < reg_parm_stack_space) - i = reg_parm_stack_space; - while (i < upper_bound && stack_usage_map[i] == 0) - i++; - - if (i < upper_bound) - { - /* We need to make a save area. */ - unsigned int size - = argvec[argnum].locate.size.constant * BITS_PER_UNIT; - enum machine_mode save_mode - = mode_for_size (size, MODE_INT, 1); - rtx adr - = plus_constant (argblock, - argvec[argnum].locate.offset.constant); - rtx stack_area - = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); - - if (save_mode == BLKmode) - { - argvec[argnum].save_area - = assign_stack_temp (BLKmode, - argvec[argnum].locate.size.constant, - 0); - - emit_block_move (validize_mem (argvec[argnum].save_area), - stack_area, - GEN_INT (argvec[argnum].locate.size.constant), - BLOCK_OP_CALL_PARM); - } - else - { - argvec[argnum].save_area = gen_reg_rtx (save_mode); - - emit_move_insn (argvec[argnum].save_area, stack_area); - } - } - } - - emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, - partial, reg, 0, argblock, - GEN_INT (argvec[argnum].locate.offset.constant), - reg_parm_stack_space, - ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); - - /* Now mark the segment we just used. */ - if (ACCUMULATE_OUTGOING_ARGS) - for (i = lower_bound; i < upper_bound; i++) - stack_usage_map[i] = 1; - - NO_DEFER_POP; - - if ((flags & ECF_CONST) - || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) - { - rtx use; - - /* Indicate argument access so that alias.c knows that these - values are live. */ - if (argblock) - use = plus_constant (argblock, - argvec[argnum].locate.offset.constant); - else - /* When arguments are pushed, trying to tell alias.c where - exactly this argument is won't work, because the - auto-increment causes confusion. So we merely indicate - that we access something with a known mode somewhere on - the stack. 
*/ - use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, - gen_rtx_SCRATCH (Pmode)); - use = gen_rtx_MEM (argvec[argnum].mode, use); - use = gen_rtx_USE (VOIDmode, use); - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); - } - } + i = lower_bound; + /* Don't worry about things in the fixed argument area; + it has already been saved. */ + if (i < reg_parm_stack_space) + i = reg_parm_stack_space; + while (i < upper_bound && stack_usage_map[i] == 0) + i++; + + if (i < upper_bound) + { + /* We need to make a save area. */ + unsigned int size + = argvec[argnum].locate.size.constant * BITS_PER_UNIT; + enum machine_mode save_mode + = mode_for_size (size, MODE_INT, 1); + rtx adr + = plus_constant (argblock, + argvec[argnum].locate.offset.constant); + rtx stack_area + = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); + + if (save_mode == BLKmode) + { + argvec[argnum].save_area + = assign_stack_temp (BLKmode, + argvec[argnum].locate.size.constant, + 0); + + emit_block_move (validize_mem (argvec[argnum].save_area), + stack_area, + GEN_INT (argvec[argnum].locate.size.constant), + BLOCK_OP_CALL_PARM); + } + else + { + argvec[argnum].save_area = gen_reg_rtx (save_mode); + + emit_move_insn (argvec[argnum].save_area, stack_area); + } + } + } + + emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, + partial, reg, 0, argblock, + GEN_INT (argvec[argnum].locate.offset.constant), + reg_parm_stack_space, + ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); + + /* Now mark the segment we just used. */ + if (ACCUMULATE_OUTGOING_ARGS) + for (i = lower_bound; i < upper_bound; i++) + stack_usage_map[i] = 1; + + NO_DEFER_POP; + + if ((flags & ECF_CONST) + || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) + { + rtx use; + + /* Indicate argument access so that alias.c knows that these + values are live. */ + if (argblock) + use = plus_constant (argblock, + argvec[argnum].locate.offset.constant); + else + /* When arguments are pushed, trying to tell alias.c where + exactly this argument is won't work, because the + auto-increment causes confusion. So we merely indicate + that we access something with a known mode somewhere on + the stack. */ + use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, + gen_rtx_SCRATCH (Pmode)); + use = gen_rtx_MEM (argvec[argnum].mode, use); + use = gen_rtx_USE (VOIDmode, use); + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); + } + } } /* If we pushed args in forward order, perform stack alignment after pushing the last arg. */ if (argblock == 0 && !PUSH_ARGS_REVERSED) anti_adjust_stack (GEN_INT (args_size.constant - - original_args_size.constant)); + - original_args_size.constant)); if (PUSH_ARGS_REVERSED) argnum = nargs - 1; @@ -3890,11 +3802,11 @@ int partial = argvec[argnum].partial; /* Handle calls that pass values in multiple non-contiguous - locations. The PA64 has examples of this for library calls. */ + locations. The PA64 has examples of this for library calls. 
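After each push, the code above flags every byte of the slot in stack_usage_map so later arguments see that region as live; the const/pure branch additionally records a USE so alias.c keeps the slot's value alive across the call. The marking step, as a trivial standalone sketch (hypothetical names):

    #include <stddef.h>

    /* Sketch only: flag a just-pushed argument slot as occupied, so the
       need_save_area test above triggers for any later overlap.  */
    static void
    mark_stack_segment (char *map, size_t lower, size_t upper)
    {
      size_t i;
      for (i = lower; i < upper; i++)
        map[i] = 1;
    }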
*/ if (reg != 0 && GET_CODE (reg) == PARALLEL) - emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); + emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); else if (reg != 0 && partial == 0) - emit_move_insn (reg, val); + emit_move_insn (reg, val); NO_DEFER_POP; } @@ -3904,42 +3816,42 @@ { rtx reg = argvec[count].reg; if (reg != 0 && GET_CODE (reg) == PARALLEL) - use_group_regs (&call_fusage, reg); + use_group_regs (&call_fusage, reg); else if (reg != 0) { - int partial = argvec[count].partial; - if (partial) - { - int nregs; + int partial = argvec[count].partial; + if (partial) + { + int nregs; gcc_assert (partial % UNITS_PER_WORD == 0); - nregs = partial / UNITS_PER_WORD; - use_regs (&call_fusage, REGNO (reg), nregs); - } - else - use_reg (&call_fusage, reg); - } + nregs = partial / UNITS_PER_WORD; + use_regs (&call_fusage, REGNO (reg), nregs); + } + else + use_reg (&call_fusage, reg); + } } /* Pass the function the address in which to return a structure value. */ if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value) { emit_move_insn (struct_value, - force_reg (Pmode, - force_operand (XEXP (mem_value, 0), - NULL_RTX))); + force_reg (Pmode, + force_operand (XEXP (mem_value, 0), + NULL_RTX))); if (REG_P (struct_value)) - use_reg (&call_fusage, struct_value); + use_reg (&call_fusage, struct_value); } /* Don't allow popping to be deferred, since then cse'ing of library calls could delete a call and leave the pop. */ NO_DEFER_POP; valreg = (mem_value == 0 && outmode != VOIDmode - ? hard_libcall_value (outmode, orgfun) : NULL_RTX); + ? hard_libcall_value (outmode, orgfun) : NULL_RTX); /* Stack must be properly aligned now. */ gcc_assert (!(stack_pointer_delta - & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); + & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); before_call = get_last_insn (); @@ -3968,16 +3880,16 @@ if (flags & ECF_NORETURN) { /* The barrier note must be emitted - immediately after the CALL_INSN. Some ports emit more than - just a CALL_INSN above, so we must search for it here. */ + immediately after the CALL_INSN. Some ports emit more than + just a CALL_INSN above, so we must search for it here. */ rtx last = get_last_insn (); while (!CALL_P (last)) - { - last = PREV_INSN (last); - /* There was no CALL_INSN? */ - gcc_assert (last != before_call); - } + { + last = PREV_INSN (last); + /* There was no CALL_INSN? */ + gcc_assert (last != before_call); + } emit_barrier_after (last); } @@ -3991,64 +3903,64 @@ if (outmode != VOIDmode && retval) { if (mem_value) - { - if (value == 0) - value = mem_value; - if (value != mem_value) - emit_move_insn (value, mem_value); - } + { + if (value == 0) + value = mem_value; + if (value != mem_value) + emit_move_insn (value, mem_value); + } else if (GET_CODE (valreg) == PARALLEL) - { - if (value == 0) - value = gen_reg_rtx (outmode); - emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); - } + { + if (value == 0) + value = gen_reg_rtx (outmode); + emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); + } else - { - /* Convert to the proper mode if a promotion has been active. */ - if (GET_MODE (valreg) != outmode) - { - int unsignedp = TYPE_UNSIGNED (tfom); - - gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, - fndecl ? 
TREE_TYPE (fndecl) : fntype, 1) - == GET_MODE (valreg)); - valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); - } - - if (value != 0) - emit_move_insn (value, valreg); - else - value = valreg; - } + { + /* Convert to the proper mode if a promotion has been active. */ + if (GET_MODE (valreg) != outmode) + { + int unsignedp = TYPE_UNSIGNED (tfom); + + gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, + fndecl ? TREE_TYPE (fndecl) : fntype, 1) + == GET_MODE (valreg)); + valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); + } + + if (value != 0) + emit_move_insn (value, valreg); + else + value = valreg; + } } if (ACCUMULATE_OUTGOING_ARGS) { #ifdef REG_PARM_STACK_SPACE if (save_area) - restore_fixed_argument_area (save_area, argblock, - high_to_save, low_to_save); + restore_fixed_argument_area (save_area, argblock, + high_to_save, low_to_save); #endif /* If we saved any argument areas, restore them. */ for (count = 0; count < nargs; count++) - if (argvec[count].save_area) - { - enum machine_mode save_mode = GET_MODE (argvec[count].save_area); - rtx adr = plus_constant (argblock, - argvec[count].locate.offset.constant); - rtx stack_area = gen_rtx_MEM (save_mode, - memory_address (save_mode, adr)); - - if (save_mode == BLKmode) - emit_block_move (stack_area, - validize_mem (argvec[count].save_area), - GEN_INT (argvec[count].locate.size.constant), - BLOCK_OP_CALL_PARM); - else - emit_move_insn (stack_area, argvec[count].save_area); - } + if (argvec[count].save_area) + { + enum machine_mode save_mode = GET_MODE (argvec[count].save_area); + rtx adr = plus_constant (argblock, + argvec[count].locate.offset.constant); + rtx stack_area = gen_rtx_MEM (save_mode, + memory_address (save_mode, adr)); + + if (save_mode == BLKmode) + emit_block_move (stack_area, + validize_mem (argvec[count].save_area), + GEN_INT (argvec[count].locate.size.constant), + BLOCK_OP_CALL_PARM); + else + emit_move_insn (stack_area, argvec[count].save_area); + } highest_outgoing_arg_in_use = initial_highest_arg_in_use; stack_usage_map = initial_stack_usage_map; @@ -4073,7 +3985,7 @@ void emit_library_call (rtx orgfun, enum libcall_type fn_type, - enum machine_mode outmode, int nargs, ...) + enum machine_mode outmode, int nargs, ...) { va_list p; @@ -4092,15 +4004,15 @@ rtx emit_library_call_value (rtx orgfun, rtx value, - enum libcall_type fn_type, - enum machine_mode outmode, int nargs, ...) + enum libcall_type fn_type, + enum machine_mode outmode, int nargs, ...) { rtx result; va_list p; va_start (p, nargs); result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, - nargs, p); + nargs, p); va_end (p); return result; @@ -4127,7 +4039,7 @@ static int store_one_arg (struct arg_data *arg, rtx argblock, int flags, - int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) + int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) { tree pval = arg->tree_value; rtx reg = 0; @@ -4146,62 +4058,62 @@ if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)) { /* If this is being stored into a pre-allocated, fixed-size, stack area, - save any previous data at that location. */ + save any previous data at that location. */ if (argblock && ! variable_size && arg->stack) - { + { #ifdef ARGS_GROW_DOWNWARD - /* stack_slot is negative, but we want to index stack_usage_map - with positive values. 
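The restore pass above is the mirror image of the earlier save logic: every slot that was copied out before the call is copied back afterwards, by block move or plain move depending on the save mode. Roughly, under the same hypothetical naming as the sketches above:

    #include <stddef.h>
    #include <string.h>

    /* Sketch only: copy every saved argument area back into its slot.  */
    struct saved_arg
    {
      void *slot;        /* location in the outgoing argument block */
      void *save;        /* save area, or NULL if none was needed */
      size_t size;
    };

    static void
    restore_saved_args (struct saved_arg *v, size_t nargs)
    {
      size_t i;
      for (i = 0; i < nargs; i++)
        if (v[i].save != NULL)
          memcpy (v[i].slot, v[i].save, v[i].size);
    }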
*/ - if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) - upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1; - else - upper_bound = 0; - - lower_bound = upper_bound - arg->locate.size.constant; + /* stack_slot is negative, but we want to index stack_usage_map + with positive values. */ + if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) + upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1; + else + upper_bound = 0; + + lower_bound = upper_bound - arg->locate.size.constant; #else - if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) - lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)); - else - lower_bound = 0; - - upper_bound = lower_bound + arg->locate.size.constant; + if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) + lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)); + else + lower_bound = 0; + + upper_bound = lower_bound + arg->locate.size.constant; #endif - i = lower_bound; - /* Don't worry about things in the fixed argument area; - it has already been saved. */ - if (i < reg_parm_stack_space) - i = reg_parm_stack_space; - while (i < upper_bound && stack_usage_map[i] == 0) - i++; - - if (i < upper_bound) - { - /* We need to make a save area. */ - unsigned int size = arg->locate.size.constant * BITS_PER_UNIT; - enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1); - rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); - rtx stack_area = gen_rtx_MEM (save_mode, adr); - - if (save_mode == BLKmode) - { - tree ot = TREE_TYPE (arg->tree_value); - tree nt = build_qualified_type (ot, (TYPE_QUALS (ot) - | TYPE_QUAL_CONST)); - - arg->save_area = assign_temp (nt, 0, 1, 1); - preserve_temp_slots (arg->save_area); - emit_block_move (validize_mem (arg->save_area), stack_area, - GEN_INT (arg->locate.size.constant), - BLOCK_OP_CALL_PARM); - } - else - { - arg->save_area = gen_reg_rtx (save_mode); - emit_move_insn (arg->save_area, stack_area); - } - } - } + i = lower_bound; + /* Don't worry about things in the fixed argument area; + it has already been saved. */ + if (i < reg_parm_stack_space) + i = reg_parm_stack_space; + while (i < upper_bound && stack_usage_map[i] == 0) + i++; + + if (i < upper_bound) + { + /* We need to make a save area. */ + unsigned int size = arg->locate.size.constant * BITS_PER_UNIT; + enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1); + rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); + rtx stack_area = gen_rtx_MEM (save_mode, adr); + + if (save_mode == BLKmode) + { + tree ot = TREE_TYPE (arg->tree_value); + tree nt = build_qualified_type (ot, (TYPE_QUALS (ot) + | TYPE_QUAL_CONST)); + + arg->save_area = assign_temp (nt, 0, 1, 1); + preserve_temp_slots (arg->save_area); + emit_block_move (validize_mem (arg->save_area), stack_area, + GEN_INT (arg->locate.size.constant), + BLOCK_OP_CALL_PARM); + } + else + { + arg->save_area = gen_reg_rtx (save_mode); + emit_move_insn (arg->save_area, stack_area); + } + } + } } /* If this isn't going to be placed on both the stack and in registers, @@ -4209,9 +4121,9 @@ if (! arg->pass_on_stack) { if (flags & ECF_SIBCALL) - reg = arg->tail_call_reg; + reg = arg->tail_call_reg; else - reg = arg->reg; + reg = arg->reg; partial = arg->partial; } @@ -4229,48 +4141,48 @@ if (arg->value == 0) { /* stack_arg_under_construction is nonzero if a function argument is - being evaluated directly into the outgoing argument list and - expand_call must take special action to preserve the argument list - if it is called recursively. 
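The two #ifdef arms above index the usage map from the slot offset in opposite directions: when the argument area grows downward the offset is negative and must be mirrored into a positive index first. As one hedged helper (the grows_down flag is an assumption standing in for ARGS_GROW_DOWNWARD):

    #include <stddef.h>

    /* Sketch only: byte range [*lower, *upper) that a stack slot
       occupies in a map indexed by non-negative offsets.  */
    static void
    slot_bounds (long slot_offset, size_t size, int grows_down,
                 size_t *lower, size_t *upper)
    {
      if (grows_down)
        {
          /* slot_offset is negative; flip it into positive indexes.  */
          *upper = (size_t) (-slot_offset + 1);
          *lower = *upper - size;
        }
      else
        {
          *lower = (size_t) slot_offset;
          *upper = *lower + size;
        }
    }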
- - For scalar function arguments stack_usage_map is sufficient to - determine which stack slots must be saved and restored. Scalar - arguments in general have pass_on_stack == 0. - - If this argument is initialized by a function which takes the - address of the argument (a C++ constructor or a C function - returning a BLKmode structure), then stack_usage_map is - insufficient and expand_call must push the stack around the - function call. Such arguments have pass_on_stack == 1. - - Note that it is always safe to set stack_arg_under_construction, - but this generates suboptimal code if set when not needed. */ + being evaluated directly into the outgoing argument list and + expand_call must take special action to preserve the argument list + if it is called recursively. + + For scalar function arguments stack_usage_map is sufficient to + determine which stack slots must be saved and restored. Scalar + arguments in general have pass_on_stack == 0. + + If this argument is initialized by a function which takes the + address of the argument (a C++ constructor or a C function + returning a BLKmode structure), then stack_usage_map is + insufficient and expand_call must push the stack around the + function call. Such arguments have pass_on_stack == 1. + + Note that it is always safe to set stack_arg_under_construction, + but this generates suboptimal code if set when not needed. */ if (arg->pass_on_stack) - stack_arg_under_construction++; + stack_arg_under_construction++; arg->value = expand_expr (pval, - (partial - || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) - ? NULL_RTX : arg->stack, - VOIDmode, EXPAND_STACK_PARM); + (partial + || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) + ? NULL_RTX : arg->stack, + VOIDmode, EXPAND_STACK_PARM); /* If we are promoting object (or for any other reason) the mode - doesn't agree, convert the mode. */ + doesn't agree, convert the mode. */ if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) - arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), - arg->value, arg->unsignedp); + arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), + arg->value, arg->unsignedp); if (arg->pass_on_stack) - stack_arg_under_construction--; + stack_arg_under_construction--; } /* Check for overlap with already clobbered argument area. */ if ((flags & ECF_SIBCALL) && MEM_P (arg->value) && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0), - arg->locate.size.constant)) + arg->locate.size.constant)) sibcall_failure = 1; /* Don't allow anything left on stack from computation @@ -4287,54 +4199,54 @@ unsigned int parm_align; /* Argument is a scalar, not entirely passed in registers. - (If part is passed in registers, arg->partial says how much - and emit_push_insn will take care of putting it there.) - - Push it, and if its size is less than the - amount of space allocated to it, - also bump stack pointer by the additional space. - Note that in C the default argument promotions - will prevent such mismatches. */ + (If part is passed in registers, arg->partial says how much + and emit_push_insn will take care of putting it there.) + + Push it, and if its size is less than the + amount of space allocated to it, + also bump stack pointer by the additional space. + Note that in C the default argument promotions + will prevent such mismatches. */ size = GET_MODE_SIZE (arg->mode); /* Compute how much space the push instruction will push. - On many machines, pushing a byte will advance the stack - pointer by a halfword. 
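The comment above describes a simple re-entrancy protocol: a counter is raised while an argument is evaluated directly into the outgoing area, so a recursive expand_call knows it must push the stack around its own call. In miniature (a sketch, names hypothetical):

    /* Sketch only: depth counter raised while an argument is built in
       place; a nested call seeing a nonzero count must preserve the
       outgoing area around itself.  */
    static int arg_under_construction;

    static void
    build_arg_in_place (void (*evaluate_into_slot) (void))
    {
      arg_under_construction++;
      evaluate_into_slot ();
      arg_under_construction--;
    }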
*/ + On many machines, pushing a byte will advance the stack + pointer by a halfword. */ #ifdef PUSH_ROUNDING size = PUSH_ROUNDING (size); #endif used = size; /* Compute how much space the argument should get: - round up to a multiple of the alignment for arguments. */ + round up to a multiple of the alignment for arguments. */ if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval))) - used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) - / (PARM_BOUNDARY / BITS_PER_UNIT)) - * (PARM_BOUNDARY / BITS_PER_UNIT)); + used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) + / (PARM_BOUNDARY / BITS_PER_UNIT)) + * (PARM_BOUNDARY / BITS_PER_UNIT)); /* Compute the alignment of the pushed argument. */ parm_align = arg->locate.boundary; if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) - { - int pad = used - size; - if (pad) - { - unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT; - parm_align = MIN (parm_align, pad_align); - } - } + { + int pad = used - size; + if (pad) + { + unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT; + parm_align = MIN (parm_align, pad_align); + } + } /* This isn't already where we want it on the stack, so put it there. - This can either be done with push or copy insns. */ + This can either be done with push or copy insns. */ emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, - parm_align, partial, reg, used - size, argblock, - ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, - ARGS_SIZE_RTX (arg->locate.alignment_pad)); + parm_align, partial, reg, used - size, argblock, + ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, + ARGS_SIZE_RTX (arg->locate.alignment_pad)); /* Unless this is a partially-in-register argument, the argument is now - in the stack. */ + in the stack. */ if (partial == 0) - arg->value = arg->stack; + arg->value = arg->stack; } else { @@ -4345,115 +4257,115 @@ rtx size_rtx; /* Pushing a nonscalar. - If part is passed in registers, PARTIAL says how much - and emit_push_insn will take care of putting it there. */ + If part is passed in registers, PARTIAL says how much + and emit_push_insn will take care of putting it there. */ /* Round its size up to a multiple - of the allocation unit for arguments. */ + of the allocation unit for arguments. */ if (arg->locate.size.var != 0) - { - excess = 0; - size_rtx = ARGS_SIZE_RTX (arg->locate.size); - } + { + excess = 0; + size_rtx = ARGS_SIZE_RTX (arg->locate.size); + } else - { - /* PUSH_ROUNDING has no effect on us, because emit_push_insn - for BLKmode is careful to avoid it. */ - excess = (arg->locate.size.constant - - int_size_in_bytes (TREE_TYPE (pval)) - + partial); - size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), - NULL_RTX, TYPE_MODE (sizetype), - EXPAND_NORMAL); - } + { + /* PUSH_ROUNDING has no effect on us, because emit_push_insn + for BLKmode is careful to avoid it. */ + excess = (arg->locate.size.constant + - int_size_in_bytes (TREE_TYPE (pval)) + + partial); + size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), + NULL_RTX, TYPE_MODE (sizetype), + EXPAND_NORMAL); + } parm_align = arg->locate.boundary; /* When an argument is padded down, the block is aligned to - PARM_BOUNDARY, but the actual argument isn't. */ + PARM_BOUNDARY, but the actual argument isn't. 
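Two small computations carry this branch: the pushed size is rounded up to a multiple of PARM_BOUNDARY, and when the value is padded downward its usable alignment is capped by the lowest set bit of the pad. Spelled out as standalone helpers (a sketch, not the GCC macros):

    /* Sketch only: round SIZE up to a multiple of the argument
       boundary, both in bytes.  */
    static unsigned int
    round_to_parm_boundary (unsigned int size, unsigned int boundary)
    {
      return ((size + boundary - 1) / boundary) * boundary;
    }

    /* Sketch only: with PAD bytes of downward padding, the value itself
       is only guaranteed aligned to the lowest set bit of the pad.  */
    static unsigned int
    padded_alignment (unsigned int parm_align, unsigned int pad,
                      unsigned int bits_per_unit)
    {
      if (pad != 0)
        {
          unsigned int pad_align = (pad & -pad) * bits_per_unit;
          if (pad_align < parm_align)
            parm_align = pad_align;
        }
      return parm_align;
    }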
*/ if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) - { - if (arg->locate.size.var) - parm_align = BITS_PER_UNIT; - else if (excess) - { - unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; - parm_align = MIN (parm_align, excess_align); - } - } + { + if (arg->locate.size.var) + parm_align = BITS_PER_UNIT; + else if (excess) + { + unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; + parm_align = MIN (parm_align, excess_align); + } + } if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) - { - /* emit_push_insn might not work properly if arg->value and - argblock + arg->locate.offset areas overlap. */ - rtx x = arg->value; - int i = 0; - - if (XEXP (x, 0) == crtl->args.internal_arg_pointer - || (GET_CODE (XEXP (x, 0)) == PLUS - && XEXP (XEXP (x, 0), 0) == - crtl->args.internal_arg_pointer - && CONST_INT_P (XEXP (XEXP (x, 0), 1)))) - { - if (XEXP (x, 0) != crtl->args.internal_arg_pointer) - i = INTVAL (XEXP (XEXP (x, 0), 1)); - - /* expand_call should ensure this. */ - gcc_assert (!arg->locate.offset.var - && arg->locate.size.var == 0 - && CONST_INT_P (size_rtx)); - - if (arg->locate.offset.constant > i) - { - if (arg->locate.offset.constant < i + INTVAL (size_rtx)) - sibcall_failure = 1; - } - else if (arg->locate.offset.constant < i) - { - /* Use arg->locate.size.constant instead of size_rtx - because we only care about the part of the argument - on the stack. */ - if (i < (arg->locate.offset.constant - + arg->locate.size.constant)) - sibcall_failure = 1; - } - else - { - /* Even though they appear to be at the same location, - if part of the outgoing argument is in registers, - they aren't really at the same location. Check for - this by making sure that the incoming size is the - same as the outgoing size. */ - if (arg->locate.size.constant != INTVAL (size_rtx)) - sibcall_failure = 1; - } - } - } + { + /* emit_push_insn might not work properly if arg->value and + argblock + arg->locate.offset areas overlap. */ + rtx x = arg->value; + int i = 0; + + if (XEXP (x, 0) == crtl->args.internal_arg_pointer + || (GET_CODE (XEXP (x, 0)) == PLUS + && XEXP (XEXP (x, 0), 0) == + crtl->args.internal_arg_pointer + && CONST_INT_P (XEXP (XEXP (x, 0), 1)))) + { + if (XEXP (x, 0) != crtl->args.internal_arg_pointer) + i = INTVAL (XEXP (XEXP (x, 0), 1)); + + /* expand_call should ensure this. */ + gcc_assert (!arg->locate.offset.var + && arg->locate.size.var == 0 + && CONST_INT_P (size_rtx)); + + if (arg->locate.offset.constant > i) + { + if (arg->locate.offset.constant < i + INTVAL (size_rtx)) + sibcall_failure = 1; + } + else if (arg->locate.offset.constant < i) + { + /* Use arg->locate.size.constant instead of size_rtx + because we only care about the part of the argument + on the stack. */ + if (i < (arg->locate.offset.constant + + arg->locate.size.constant)) + sibcall_failure = 1; + } + else + { + /* Even though they appear to be at the same location, + if part of the outgoing argument is in registers, + they aren't really at the same location. Check for + this by making sure that the incoming size is the + same as the outgoing size. 
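The three-way comparison above is an interval overlap test between the incoming argument's current location and the outgoing slot, with the twist that an exact match is only safe when the incoming and outgoing sizes agree (otherwise part of the value lives in registers). As a compact restatement (hypothetical names):

    /* Sketch only: nonzero if copying an incoming argument at
       [src, src + src_size) onto an outgoing slot at
       [dst, dst + dst_size) would clobber it, per the logic above.  */
    static int
    sibcall_copy_unsafe (long src, long src_size, long dst, long dst_size)
    {
      if (dst > src)
        return dst < src + src_size;     /* slot starts inside source */
      if (dst < src)
        return src < dst + dst_size;     /* source starts inside slot */
      return src_size != dst_size;       /* same spot, but partial regs */
    }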
*/ + if (arg->locate.size.constant != INTVAL (size_rtx)) + sibcall_failure = 1; + } + } + } emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, - parm_align, partial, reg, excess, argblock, - ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, - ARGS_SIZE_RTX (arg->locate.alignment_pad)); + parm_align, partial, reg, excess, argblock, + ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, + ARGS_SIZE_RTX (arg->locate.alignment_pad)); /* Unless this is a partially-in-register argument, the argument is now - in the stack. - - ??? Unlike the case above, in which we want the actual - address of the data, so that we can load it directly into a - register, here we want the address of the stack slot, so that - it's properly aligned for word-by-word copying or something - like that. It's not clear that this is always correct. */ + in the stack. + + ??? Unlike the case above, in which we want the actual + address of the data, so that we can load it directly into a + register, here we want the address of the stack slot, so that + it's properly aligned for word-by-word copying or something + like that. It's not clear that this is always correct. */ if (partial == 0) - arg->value = arg->stack_slot; + arg->value = arg->stack_slot; } if (arg->reg && GET_CODE (arg->reg) == PARALLEL) { tree type = TREE_TYPE (arg->tree_value); arg->parallel_value - = emit_group_load_into_temps (arg->reg, arg->value, type, - int_size_in_bytes (type)); + = emit_group_load_into_temps (arg->reg, arg->value, type, + int_size_in_bytes (type)); } /* Mark all slots this store used. */ @@ -4480,7 +4392,7 @@ bool must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED, - const_tree type) + const_tree type) { if (!type) return false; @@ -4521,7 +4433,7 @@ if (mode == BLKmode && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT) && (FUNCTION_ARG_PADDING (mode, type) - == (BYTES_BIG_ENDIAN ? upward : downward))) + == (BYTES_BIG_ENDIAN ? upward : downward))) return true; return false;
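The closing predicate reduces to: variable-sized types always go on the stack, and so does a BLKmode aggregate whose size is not a multiple of PARM_BOUNDARY when its padding direction matches the machine's endianness. A loose restatement (the pads_interfering flag is an assumption standing in for the FUNCTION_ARG_PADDING test; not the GCC API):

    #include <stdbool.h>

    /* Sketch only: restatement of the stack-passing checks above.  */
    static bool
    must_pass_in_stack (bool variable_size, unsigned int size_bytes,
                        unsigned int parm_boundary_bytes,
                        bool pads_interfering)
    {
      if (variable_size)
        return true;
      return size_bytes % parm_boundary_bytes != 0 && pads_interfering;
    }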