# HG changeset patch
# User ryoma
# Date 1272182625 -32400
# Node ID 60c1b2f8487adcc1ccb6a66a6028b8f6541a8a1c
# Parent bd49c42ec43ee21eb98845fb212d029c6d729fa8
remove expand_cbc_goto, and add a warning process.

A CbC goto is now parsed as an ordinary expression in
c_parser_statement_after_labels: when the parsed expression is a
CALL_EXPR it is marked as a CbC goto tail call and finished with an
implicit return; anything else is reported through c_parser_error.

diff -r bd49c42ec43e -r 60c1b2f8487a gcc/c-parser.c
--- a/gcc/c-parser.c Mon Feb 15 17:39:45 2010 +0900
+++ b/gcc/c-parser.c Sun Apr 25 17:03:45 2010 +0900
@@ -3881,6 +3881,7 @@
 static void
 c_parser_statement_after_labels (c_parser *parser)
 {
+  struct c_expr expr;
   location_t loc = c_parser_peek_token (parser)->location;
   tree stmt = NULL_TREE;
   bool in_if_block = parser->in_if_block;
@@ -3931,29 +3932,28 @@
       else
 #ifndef noCbC
         {
-          struct c_expr expr;
           if (c_parser_next_token_is (parser, CPP_NAME))
-            {
-              tree id = c_parser_peek_token (parser)->value;
-              location_t loc = c_parser_peek_token (parser)->location;
-              /** build_external_ref (id,RID_CbC_CODE , loc); **/
-              build_external_ref (loc, id, RID_CbC_CODE, &expr.original_type);
-            }
+            {
+              tree id = c_parser_peek_token (parser)->value;
+              location_t loc = c_parser_peek_token (parser)->location;
+              /** build_external_ref (id,RID_CbC_CODE , loc); **/
+              build_external_ref (loc, id, RID_CbC_CODE, &expr.original_type);
+            }
           expr = c_parser_expr_no_commas (parser, NULL);
           if (TREE_CODE(expr.value) == CALL_EXPR )
-            {
-              location_t loc = c_parser_peek_token (parser)->location;
-              cbc_replace_arguments (loc, expr.value);
-
-              TREE_TYPE(expr.value) = void_type_node;
-              /*tree env = NULL_TREE;**/
-              CbC_IS_CbC_GOTO (expr.value) = 1;
-              CALL_EXPR_TAILCALL (expr.value) = 1;
-              add_stmt(expr.value);
-              stmt = c_finish_return(loc, NULL_TREE, NULL_TREE); /* stmt = c_finish_return (0); */
-            }
+            {
+              location_t loc = c_parser_peek_token (parser)->location;
+              cbc_replace_arguments (loc, expr.value);
+
+              TREE_TYPE(expr.value) = void_type_node;
+              /*tree env = NULL_TREE;**/
+              CbC_IS_CbC_GOTO (expr.value) = 1;
+              CALL_EXPR_TAILCALL (expr.value) = 1;
+              add_stmt(expr.value);
+              stmt = c_finish_return(loc, NULL_TREE, NULL_TREE); /* stmt = c_finish_return (0); */
+            }
           else
-            c_parser_error (parser, "expected code segment jump or %<*%>");
+            c_parser_error (parser, "expected code segment jump or %<*%>");
         }
 #else
       c_parser_error (parser, "expected identifier or %<*%>");
@@ -5656,12 +5656,12 @@
           break;
         }
       {
-        tree id = c_parser_peek_token (parser)->value;
-        c_parser_consume_token (parser);
-        expr.value = build_external_ref (loc, id,
-                                         (c_parser_peek_token (parser)->type
-                                          == CPP_OPEN_PAREN),
-                                         &expr.original_type);
+        tree id = c_parser_peek_token (parser)->value;
+        c_parser_consume_token (parser);
+        expr.value = build_external_ref (loc, id,
+                                         (c_parser_peek_token (parser)->type
+                                          == CPP_OPEN_PAREN),
+                                         &expr.original_type);
       }
       break;
     case CPP_OPEN_PAREN:
diff -r bd49c42ec43e -r 60c1b2f8487a gcc/calls.c
--- a/gcc/calls.c Mon Feb 15 17:39:45 2010 +0900
+++ b/gcc/calls.c Sun Apr 25 17:03:45 2010 +0900
@@ -128,33 +128,33 @@
 static int stack_arg_under_construction;
 
 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
-                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
-                         CUMULATIVE_ARGS *);
+                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
+                         CUMULATIVE_ARGS *);
 static void precompute_register_parameters (int, struct arg_data *, int *);
 static int store_one_arg (struct arg_data *, rtx, int, int, int);
 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
 static int finalize_must_preallocate (int, int, struct arg_data *,
-                                      struct args_size *);
+                                      struct args_size *);
 static void precompute_arguments (int, struct arg_data *);
 static int compute_argument_block_size (int,
struct args_size *, tree, tree, int); static void initialize_argument_information (int, struct arg_data *, - struct args_size *, int, - tree, tree, - tree, tree, CUMULATIVE_ARGS *, int, - rtx *, int *, int *, int *, - bool *, bool); + struct args_size *, int, + tree, tree, + tree, tree, CUMULATIVE_ARGS *, int, + rtx *, int *, int *, int *, + bool *, bool); static void compute_argument_addresses (struct arg_data *, rtx, int); static rtx rtx_for_function_call (tree, tree); static void load_register_parameters (struct arg_data *, int, rtx *, int, - int, int *); + int, int *); static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type, - enum machine_mode, int, va_list); + enum machine_mode, int, va_list); static int special_function_p (const_tree, int); static int check_sibcall_argument_overlap_1 (rtx); static int check_sibcall_argument_overlap (rtx, struct arg_data *, int); static int combine_pending_stack_adjustment_and_call (int, struct args_size *, - unsigned int); + unsigned int); static tree split_complex_types (tree); #ifdef REG_PARM_STACK_SPACE @@ -171,7 +171,7 @@ rtx prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value, - rtx *call_fusage, int reg_parm_seen, int sibcallp) + rtx *call_fusage, int reg_parm_seen, int sibcallp) { /* Make a valid memory address and copy constants through pseudo-regs, but not for a constant address if -fno-function-cse. */ @@ -179,13 +179,13 @@ /* If we are using registers for parameters, force the function address into a register now. */ funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen) - ? force_not_mem (memory_address (FUNCTION_MODE, funexp)) - : memory_address (FUNCTION_MODE, funexp)); + ? force_not_mem (memory_address (FUNCTION_MODE, funexp)) + : memory_address (FUNCTION_MODE, funexp)); else if (! sibcallp) { #ifndef NO_FUNCTION_CSE if (optimize && ! flag_no_function_cse) - funexp = force_reg (Pmode, funexp); + funexp = force_reg (Pmode, funexp); #endif } @@ -199,7 +199,7 @@ emit_move_insn (chain, static_chain_value); if (REG_P (chain)) - use_reg (call_fusage, chain); + use_reg (call_fusage, chain); } return funexp; @@ -247,13 +247,13 @@ static void emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED, - tree funtype ATTRIBUTE_UNUSED, - HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, - HOST_WIDE_INT rounded_stack_size, - HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, - rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, - int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags, - CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED) + tree funtype ATTRIBUTE_UNUSED, + HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, + HOST_WIDE_INT rounded_stack_size, + HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, + rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, + int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags, + CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED) { rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size); rtx call_insn; @@ -279,16 +279,16 @@ rtx pat; /* If this subroutine pops its own args, record that in the call insn - if possible, for the sake of frame pointer elimination. */ + if possible, for the sake of frame pointer elimination. 
*/ if (valreg) - pat = GEN_SIBCALL_VALUE_POP (valreg, - gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, - n_pop); + pat = GEN_SIBCALL_VALUE_POP (valreg, + gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, + n_pop); else - pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, n_pop); + pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, n_pop); emit_call_insn (pat); already_popped = 1; @@ -312,15 +312,15 @@ rtx pat; /* If this subroutine pops its own args, record that in the call insn - if possible, for the sake of frame pointer elimination. */ + if possible, for the sake of frame pointer elimination. */ if (valreg) - pat = GEN_CALL_VALUE_POP (valreg, - gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, n_pop); + pat = GEN_CALL_VALUE_POP (valreg, + gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, n_pop); else - pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, n_pop); + pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, n_pop); emit_call_insn (pat); already_popped = 1; @@ -333,14 +333,14 @@ && HAVE_sibcall && HAVE_sibcall_value) { if (valreg) - emit_call_insn (GEN_SIBCALL_VALUE (valreg, - gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, - next_arg_reg, NULL_RTX)); + emit_call_insn (GEN_SIBCALL_VALUE (valreg, + gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, + next_arg_reg, NULL_RTX)); else - emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, - GEN_INT (struct_value_size))); + emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, + GEN_INT (struct_value_size))); } else #endif @@ -349,14 +349,14 @@ if (HAVE_call && HAVE_call_value) { if (valreg) - emit_call_insn (GEN_CALL_VALUE (valreg, - gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, - NULL_RTX)); + emit_call_insn (GEN_CALL_VALUE (valreg, + gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, + NULL_RTX)); else - emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp), - rounded_stack_size_rtx, next_arg_reg, - GEN_INT (struct_value_size))); + emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp), + rounded_stack_size_rtx, next_arg_reg, + GEN_INT (struct_value_size))); } else #endif @@ -406,10 +406,10 @@ if (n_popped > 0) { if (!already_popped) - CALL_INSN_FUNCTION_USAGE (call_insn) - = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx), - CALL_INSN_FUNCTION_USAGE (call_insn)); + CALL_INSN_FUNCTION_USAGE (call_insn) + = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx), + CALL_INSN_FUNCTION_USAGE (call_insn)); rounded_stack_size -= n_popped; rounded_stack_size_rtx = GEN_INT (rounded_stack_size); stack_pointer_delta -= n_popped; @@ -422,23 +422,23 @@ if (!ACCUMULATE_OUTGOING_ARGS) { /* If returning from the subroutine does not automatically pop the args, - we need an instruction to pop them sooner or later. - Perhaps do it now; perhaps just record how much space to pop later. - - If returning from the subroutine does pop the args, indicate that the - stack pointer will be changed. */ + we need an instruction to pop them sooner or later. 
+ Perhaps do it now; perhaps just record how much space to pop later. + + If returning from the subroutine does pop the args, indicate that the + stack pointer will be changed. */ if (rounded_stack_size != 0) - { - if (ecf_flags & ECF_NORETURN) - /* Just pretend we did the pop. */ - stack_pointer_delta -= rounded_stack_size; - else if (flag_defer_pop && inhibit_defer_pop == 0 - && ! (ecf_flags & (ECF_CONST | ECF_PURE))) - pending_stack_adjust += rounded_stack_size; - else - adjust_stack (rounded_stack_size_rtx); - } + { + if (ecf_flags & ECF_NORETURN) + /* Just pretend we did the pop. */ + stack_pointer_delta -= rounded_stack_size; + else if (flag_defer_pop && inhibit_defer_pop == 0 + && ! (ecf_flags & (ECF_CONST | ECF_PURE))) + pending_stack_adjust += rounded_stack_size; + else + adjust_stack (rounded_stack_size_rtx); + } } /* When we accumulate outgoing args, we must avoid any stack manipulations. Restore the stack pointer to its original value now. Usually @@ -472,71 +472,71 @@ if (fndecl && DECL_NAME (fndecl) && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17 /* Exclude functions not at the file scope, or not `extern', - since they are not the magic functions we would otherwise - think they are. - FIXME: this should be handled with attributes, not with this - hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong - because you can declare fork() inside a function if you - wish. */ + since they are not the magic functions we would otherwise + think they are. + FIXME: this should be handled with attributes, not with this + hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong + because you can declare fork() inside a function if you + wish. */ && (DECL_CONTEXT (fndecl) == NULL_TREE - || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL) + || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL) && TREE_PUBLIC (fndecl)) { const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl)); const char *tname = name; /* We assume that alloca will always be called by name. It - makes no sense to pass it as a pointer-to-function to - anything that does not understand its behavior. */ + makes no sense to pass it as a pointer-to-function to + anything that does not understand its behavior. */ if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6 - && name[0] == 'a' - && ! strcmp (name, "alloca")) - || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16 - && name[0] == '_' - && ! strcmp (name, "__builtin_alloca")))) - flags |= ECF_MAY_BE_ALLOCA; + && name[0] == 'a' + && ! strcmp (name, "alloca")) + || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16 + && name[0] == '_' + && ! strcmp (name, "__builtin_alloca")))) + flags |= ECF_MAY_BE_ALLOCA; /* Disregard prefix _, __, __x or __builtin_. */ if (name[0] == '_') - { - if (name[1] == '_' - && name[2] == 'b' - && !strncmp (name + 3, "uiltin_", 7)) - tname += 10; - else if (name[1] == '_' && name[2] == 'x') - tname += 3; - else if (name[1] == '_') - tname += 2; - else - tname += 1; - } + { + if (name[1] == '_' + && name[2] == 'b' + && !strncmp (name + 3, "uiltin_", 7)) + tname += 10; + else if (name[1] == '_' && name[2] == 'x') + tname += 3; + else if (name[1] == '_') + tname += 2; + else + tname += 1; + } if (tname[0] == 's') - { - if ((tname[1] == 'e' - && (! strcmp (tname, "setjmp") - || ! strcmp (tname, "setjmp_syscall"))) - || (tname[1] == 'i' - && ! strcmp (tname, "sigsetjmp")) - || (tname[1] == 'a' - && ! strcmp (tname, "savectx"))) - flags |= ECF_RETURNS_TWICE; - - if (tname[1] == 'i' - && ! 
strcmp (tname, "siglongjmp")) - flags |= ECF_NORETURN; - } + { + if ((tname[1] == 'e' + && (! strcmp (tname, "setjmp") + || ! strcmp (tname, "setjmp_syscall"))) + || (tname[1] == 'i' + && ! strcmp (tname, "sigsetjmp")) + || (tname[1] == 'a' + && ! strcmp (tname, "savectx"))) + flags |= ECF_RETURNS_TWICE; + + if (tname[1] == 'i' + && ! strcmp (tname, "siglongjmp")) + flags |= ECF_NORETURN; + } else if ((tname[0] == 'q' && tname[1] == 's' - && ! strcmp (tname, "qsetjmp")) - || (tname[0] == 'v' && tname[1] == 'f' - && ! strcmp (tname, "vfork")) - || (tname[0] == 'g' && tname[1] == 'e' - && !strcmp (tname, "getcontext"))) - flags |= ECF_RETURNS_TWICE; + && ! strcmp (tname, "qsetjmp")) + || (tname[0] == 'v' && tname[1] == 'f' + && ! strcmp (tname, "vfork")) + || (tname[0] == 'g' && tname[1] == 'e' + && !strcmp (tname, "getcontext"))) + flags |= ECF_RETURNS_TWICE; else if (tname[0] == 'l' && tname[1] == 'o' - && ! strcmp (tname, "longjmp")) - flags |= ECF_NORETURN; + && ! strcmp (tname, "longjmp")) + flags |= ECF_NORETURN; } return flags; @@ -577,7 +577,7 @@ && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL) && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0) - & ECF_MAY_BE_ALLOCA)) + & ECF_MAY_BE_ALLOCA)) return true; return false; } @@ -593,25 +593,25 @@ { /* The function exp may have the `malloc' attribute. */ if (DECL_IS_MALLOC (exp)) - flags |= ECF_MALLOC; + flags |= ECF_MALLOC; /* The function exp may have the `returns_twice' attribute. */ if (DECL_IS_RETURNS_TWICE (exp)) - flags |= ECF_RETURNS_TWICE; + flags |= ECF_RETURNS_TWICE; /* Process the pure and const attributes. */ if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)) - flags |= ECF_CONST; + flags |= ECF_CONST; if (DECL_PURE_P (exp)) - flags |= ECF_PURE; + flags |= ECF_PURE; if (DECL_LOOPING_CONST_OR_PURE_P (exp)) - flags |= ECF_LOOPING_CONST_OR_PURE; + flags |= ECF_LOOPING_CONST_OR_PURE; if (DECL_IS_NOVOPS (exp)) - flags |= ECF_NOVOPS; + flags |= ECF_NOVOPS; if (TREE_NOTHROW (exp)) - flags |= ECF_NOTHROW; + flags |= ECF_NOTHROW; flags = special_function_p (exp, flags); } @@ -638,9 +638,9 @@ { t = TREE_TYPE (CALL_EXPR_FN (t)); if (t && TREE_CODE (t) == POINTER_TYPE) - flags = flags_from_decl_or_type (TREE_TYPE (t)); + flags = flags_from_decl_or_type (TREE_TYPE (t)); else - flags = 0; + flags = 0; } return flags; @@ -655,7 +655,7 @@ static void precompute_register_parameters (int num_actuals, struct arg_data *args, - int *reg_parm_seen) + int *reg_parm_seen) { int i; @@ -664,59 +664,59 @@ for (i = 0; i < num_actuals; i++) if (args[i].reg != 0 && ! args[i].pass_on_stack) { - *reg_parm_seen = 1; - - if (args[i].value == 0) - { - push_temp_slots (); - args[i].value = expand_normal (args[i].tree_value); - preserve_temp_slots (args[i].value); - pop_temp_slots (); - } - - /* If the value is a non-legitimate constant, force it into a - pseudo now. TLS symbols sometimes need a call to resolve. */ - if (CONSTANT_P (args[i].value) - && !LEGITIMATE_CONSTANT_P (args[i].value)) - args[i].value = force_reg (args[i].mode, args[i].value); - - /* If we are to promote the function arg to a wider mode, - do it now. */ - - if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value))) - args[i].value - = convert_modes (args[i].mode, - TYPE_MODE (TREE_TYPE (args[i].tree_value)), - args[i].value, args[i].unsignedp); - - /* If we're going to have to load the value by parts, pull the - parts into pseudos. The part extraction process can involve - non-trivial computation. 
*/ - if (GET_CODE (args[i].reg) == PARALLEL) - { - tree type = TREE_TYPE (args[i].tree_value); - args[i].parallel_value - = emit_group_load_into_temps (args[i].reg, args[i].value, - type, int_size_in_bytes (type)); - } - - /* If the value is expensive, and we are inside an appropriately - short loop, put the value into a pseudo and then put the pseudo - into the hard reg. - - For small register classes, also do this if this call uses - register parameters. This is to avoid reload conflicts while - loading the parameters registers. */ - - else if ((! (REG_P (args[i].value) - || (GET_CODE (args[i].value) == SUBREG - && REG_P (SUBREG_REG (args[i].value))))) - && args[i].mode != BLKmode - && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ()) - > COSTS_N_INSNS (1) - && ((SMALL_REGISTER_CLASSES && *reg_parm_seen) - || optimize)) - args[i].value = copy_to_mode_reg (args[i].mode, args[i].value); + *reg_parm_seen = 1; + + if (args[i].value == 0) + { + push_temp_slots (); + args[i].value = expand_normal (args[i].tree_value); + preserve_temp_slots (args[i].value); + pop_temp_slots (); + } + + /* If the value is a non-legitimate constant, force it into a + pseudo now. TLS symbols sometimes need a call to resolve. */ + if (CONSTANT_P (args[i].value) + && !LEGITIMATE_CONSTANT_P (args[i].value)) + args[i].value = force_reg (args[i].mode, args[i].value); + + /* If we are to promote the function arg to a wider mode, + do it now. */ + + if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value))) + args[i].value + = convert_modes (args[i].mode, + TYPE_MODE (TREE_TYPE (args[i].tree_value)), + args[i].value, args[i].unsignedp); + + /* If we're going to have to load the value by parts, pull the + parts into pseudos. The part extraction process can involve + non-trivial computation. */ + if (GET_CODE (args[i].reg) == PARALLEL) + { + tree type = TREE_TYPE (args[i].tree_value); + args[i].parallel_value + = emit_group_load_into_temps (args[i].reg, args[i].value, + type, int_size_in_bytes (type)); + } + + /* If the value is expensive, and we are inside an appropriately + short loop, put the value into a pseudo and then put the pseudo + into the hard reg. + + For small register classes, also do this if this call uses + register parameters. This is to avoid reload conflicts while + loading the parameters registers. */ + + else if ((! (REG_P (args[i].value) + || (GET_CODE (args[i].value) == SUBREG + && REG_P (SUBREG_REG (args[i].value))))) + && args[i].mode != BLKmode + && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ()) + > COSTS_N_INSNS (1) + && ((SMALL_REGISTER_CLASSES && *reg_parm_seen) + || optimize)) + args[i].value = copy_to_mode_reg (args[i].mode, args[i].value); } } @@ -743,51 +743,51 @@ for (low = 0; low < high; low++) if (stack_usage_map[low] != 0) { - int num_to_save; - enum machine_mode save_mode; - int delta; - rtx stack_area; - rtx save_area; - - while (stack_usage_map[--high] == 0) - ; - - *low_to_save = low; - *high_to_save = high; - - num_to_save = high - low + 1; - save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1); - - /* If we don't have the required alignment, must do this - in BLKmode. 
*/ - if ((low & (MIN (GET_MODE_SIZE (save_mode), - BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1))) - save_mode = BLKmode; + int num_to_save; + enum machine_mode save_mode; + int delta; + rtx stack_area; + rtx save_area; + + while (stack_usage_map[--high] == 0) + ; + + *low_to_save = low; + *high_to_save = high; + + num_to_save = high - low + 1; + save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1); + + /* If we don't have the required alignment, must do this + in BLKmode. */ + if ((low & (MIN (GET_MODE_SIZE (save_mode), + BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1))) + save_mode = BLKmode; #ifdef ARGS_GROW_DOWNWARD - delta = -high; + delta = -high; #else - delta = low; + delta = low; #endif - stack_area = gen_rtx_MEM (save_mode, - memory_address (save_mode, - plus_constant (argblock, - delta))); - - set_mem_align (stack_area, PARM_BOUNDARY); - if (save_mode == BLKmode) - { - save_area = assign_stack_temp (BLKmode, num_to_save, 0); - emit_block_move (validize_mem (save_area), stack_area, - GEN_INT (num_to_save), BLOCK_OP_CALL_PARM); - } - else - { - save_area = gen_reg_rtx (save_mode); - emit_move_insn (save_area, stack_area); - } - - return save_area; + stack_area = gen_rtx_MEM (save_mode, + memory_address (save_mode, + plus_constant (argblock, + delta))); + + set_mem_align (stack_area, PARM_BOUNDARY); + if (save_mode == BLKmode) + { + save_area = assign_stack_temp (BLKmode, num_to_save, 0); + emit_block_move (validize_mem (save_area), stack_area, + GEN_INT (num_to_save), BLOCK_OP_CALL_PARM); + } + else + { + save_area = gen_reg_rtx (save_mode); + emit_move_insn (save_area, stack_area); + } + + return save_area; } return NULL_RTX; @@ -806,16 +806,16 @@ delta = low_to_save; #endif stack_area = gen_rtx_MEM (save_mode, - memory_address (save_mode, - plus_constant (argblock, delta))); + memory_address (save_mode, + plus_constant (argblock, delta))); set_mem_align (stack_area, PARM_BOUNDARY); if (save_mode != BLKmode) emit_move_insn (stack_area, save_area); else emit_block_move (stack_area, validize_mem (save_area), - GEN_INT (high_to_save - low_to_save + 1), - BLOCK_OP_CALL_PARM); + GEN_INT (high_to_save - low_to_save + 1), + BLOCK_OP_CALL_PARM); } #endif /* REG_PARM_STACK_SPACE */ @@ -835,69 +835,69 @@ for (i = 0; i < num_actuals; i++) if (args[i].reg != 0 && ! args[i].pass_on_stack - && args[i].mode == BLKmode - && MEM_P (args[i].value) - && (MEM_ALIGN (args[i].value) - < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD))) + && args[i].mode == BLKmode + && MEM_P (args[i].value) + && (MEM_ALIGN (args[i].value) + < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD))) { - int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); - int endian_correction = 0; - - if (args[i].partial) - { - gcc_assert (args[i].partial % UNITS_PER_WORD == 0); - args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD; - } - else - { - args[i].n_aligned_regs - = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; - } - - args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs); - - /* Structures smaller than a word are normally aligned to the - least significant byte. On a BYTES_BIG_ENDIAN machine, - this means we must skip the empty high order bytes when - calculating the bit offset. 
*/ - if (bytes < UNITS_PER_WORD + int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); + int endian_correction = 0; + + if (args[i].partial) + { + gcc_assert (args[i].partial % UNITS_PER_WORD == 0); + args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD; + } + else + { + args[i].n_aligned_regs + = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; + } + + args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs); + + /* Structures smaller than a word are normally aligned to the + least significant byte. On a BYTES_BIG_ENDIAN machine, + this means we must skip the empty high order bytes when + calculating the bit offset. */ + if (bytes < UNITS_PER_WORD #ifdef BLOCK_REG_PADDING - && (BLOCK_REG_PADDING (args[i].mode, - TREE_TYPE (args[i].tree_value), 1) - == downward) + && (BLOCK_REG_PADDING (args[i].mode, + TREE_TYPE (args[i].tree_value), 1) + == downward) #else - && BYTES_BIG_ENDIAN + && BYTES_BIG_ENDIAN #endif - ) - endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT; - - for (j = 0; j < args[i].n_aligned_regs; j++) - { - rtx reg = gen_reg_rtx (word_mode); - rtx word = operand_subword_force (args[i].value, j, BLKmode); - int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD); - - args[i].aligned_regs[j] = reg; - word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX, - word_mode, word_mode); - - /* There is no need to restrict this code to loading items - in TYPE_ALIGN sized hunks. The bitfield instructions can - load up entire word sized registers efficiently. - - ??? This may not be needed anymore. - We use to emit a clobber here but that doesn't let later - passes optimize the instructions we emit. By storing 0 into - the register later passes know the first AND to zero out the - bitfield being set in the register is unnecessary. The store - of 0 will be deleted as will at least the first AND. */ - - emit_move_insn (reg, const0_rtx); - - bytes -= bitsize / BITS_PER_UNIT; - store_bit_field (reg, bitsize, endian_correction, word_mode, - word); - } + ) + endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT; + + for (j = 0; j < args[i].n_aligned_regs; j++) + { + rtx reg = gen_reg_rtx (word_mode); + rtx word = operand_subword_force (args[i].value, j, BLKmode); + int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD); + + args[i].aligned_regs[j] = reg; + word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX, + word_mode, word_mode); + + /* There is no need to restrict this code to loading items + in TYPE_ALIGN sized hunks. The bitfield instructions can + load up entire word sized registers efficiently. + + ??? This may not be needed anymore. + We use to emit a clobber here but that doesn't let later + passes optimize the instructions we emit. By storing 0 into + the register later passes know the first AND to zero out the + bitfield being set in the register is unnecessary. The store + of 0 will be deleted as will at least the first AND. 
*/ + + emit_move_insn (reg, const0_rtx); + + bytes -= bitsize / BITS_PER_UNIT; + store_bit_field (reg, bitsize, endian_correction, word_mode, + word); + } } } @@ -933,16 +933,16 @@ static void initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, - struct arg_data *args, - struct args_size *args_size, - int n_named_args ATTRIBUTE_UNUSED, - tree exp, tree struct_value_addr_value, - tree fndecl, tree fntype, - CUMULATIVE_ARGS *args_so_far, - int reg_parm_stack_space, - rtx *old_stack_level, int *old_pending_adj, - int *must_preallocate, int *ecf_flags, - bool *may_tailcall, bool call_from_thunk_p) + struct arg_data *args, + struct args_size *args_size, + int n_named_args ATTRIBUTE_UNUSED, + tree exp, tree struct_value_addr_value, + tree fndecl, tree fntype, + CUMULATIVE_ARGS *args_so_far, + int reg_parm_stack_space, + rtx *old_stack_level, int *old_pending_adj, + int *must_preallocate, int *ecf_flags, + bool *may_tailcall, bool call_from_thunk_p) { location_t loc = EXPR_LOCATION (exp); /* 1 if scanning parms front to back, -1 if scanning back to front. */ @@ -964,7 +964,7 @@ { i = num_actuals - 1, inc = -1; /* In this case, must reverse order of args - so that we compute and push the last arg first. */ + so that we compute and push the last arg first. */ } else { @@ -980,25 +980,25 @@ if (struct_value_addr_value) { - args[j].tree_value = struct_value_addr_value; - j += inc; + args[j].tree_value = struct_value_addr_value; + j += inc; } FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) { - tree argtype = TREE_TYPE (arg); - if (targetm.calls.split_complex_arg - && argtype - && TREE_CODE (argtype) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (argtype)) - { - tree subtype = TREE_TYPE (argtype); - args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); - j += inc; - args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); - } - else - args[j].tree_value = arg; - j += inc; + tree argtype = TREE_TYPE (arg); + if (targetm.calls.split_complex_arg + && argtype + && TREE_CODE (argtype) == COMPLEX_TYPE + && targetm.calls.split_complex_arg (argtype)) + { + tree subtype = TREE_TYPE (argtype); + args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); + j += inc; + args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); + } + else + args[j].tree_value = arg; + j += inc; } } @@ -1011,186 +1011,186 @@ /* Replace erroneous argument with constant zero. */ if (type == error_mark_node || !COMPLETE_TYPE_P (type)) - args[i].tree_value = integer_zero_node, type = integer_type_node; + args[i].tree_value = integer_zero_node, type = integer_type_node; /* If TYPE is a transparent union, pass things the way we would - pass the first field of the union. We have already verified that - the modes are the same. */ + pass the first field of the union. We have already verified that + the modes are the same. */ if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type)) - type = TREE_TYPE (TYPE_FIELDS (type)); + type = TREE_TYPE (TYPE_FIELDS (type)); /* Decide where to pass this arg. - args[i].reg is nonzero if all or part is passed in registers. - - args[i].partial is nonzero if part but not all is passed in registers, - and the exact value says how many bytes are passed in registers. - - args[i].pass_on_stack is nonzero if the argument must at least be - computed on the stack. It may then be loaded back into registers - if args[i].reg is nonzero. - - These decisions are driven by the FUNCTION_... macros and must agree - with those made by function.c. 
*/ + args[i].reg is nonzero if all or part is passed in registers. + + args[i].partial is nonzero if part but not all is passed in registers, + and the exact value says how many bytes are passed in registers. + + args[i].pass_on_stack is nonzero if the argument must at least be + computed on the stack. It may then be loaded back into registers + if args[i].reg is nonzero. + + These decisions are driven by the FUNCTION_... macros and must agree + with those made by function.c. */ /* See if this argument should be passed by invisible reference. */ if (pass_by_reference (args_so_far, TYPE_MODE (type), - type, argpos < n_named_args)) - { - bool callee_copies; - tree base; - - callee_copies - = reference_callee_copied (args_so_far, TYPE_MODE (type), - type, argpos < n_named_args); - - /* If we're compiling a thunk, pass through invisible references - instead of making a copy. */ - if (call_from_thunk_p - || (callee_copies - && !TREE_ADDRESSABLE (type) - && (base = get_base_address (args[i].tree_value)) - && TREE_CODE (base) != SSA_NAME - && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) - { - /* We can't use sibcalls if a callee-copied argument is - stored in the current function's frame. */ - if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) - *may_tailcall = false; - - args[i].tree_value = build_fold_addr_expr_loc (loc, - args[i].tree_value); - type = TREE_TYPE (args[i].tree_value); - - if (*ecf_flags & ECF_CONST) - *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); - } - else - { - /* We make a copy of the object and pass the address to the - function being called. */ - rtx copy; - - if (!COMPLETE_TYPE_P (type) - || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST - || (flag_stack_check == GENERIC_STACK_CHECK - && compare_tree_int (TYPE_SIZE_UNIT (type), - STACK_CHECK_MAX_VAR_SIZE) > 0)) - { - /* This is a variable-sized object. Make space on the stack - for it. */ - rtx size_rtx = expr_size (args[i].tree_value); - - if (*old_stack_level == 0) - { - emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX); - *old_pending_adj = pending_stack_adjust; - pending_stack_adjust = 0; - } - - copy = gen_rtx_MEM (BLKmode, - allocate_dynamic_stack_space - (size_rtx, NULL_RTX, TYPE_ALIGN (type))); - set_mem_attributes (copy, type, 1); - } - else - copy = assign_temp (type, 0, 1, 0); - - store_expr (args[i].tree_value, copy, 0, false); - - /* Just change the const function to pure and then let - the next test clear the pure based on - callee_copies. */ - if (*ecf_flags & ECF_CONST) - { - *ecf_flags &= ~ECF_CONST; - *ecf_flags |= ECF_PURE; - } - - if (!callee_copies && *ecf_flags & ECF_PURE) - *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); - - args[i].tree_value - = build_fold_addr_expr_loc (loc, make_tree (type, copy)); - type = TREE_TYPE (args[i].tree_value); - *may_tailcall = false; - } - } + type, argpos < n_named_args)) + { + bool callee_copies; + tree base; + + callee_copies + = reference_callee_copied (args_so_far, TYPE_MODE (type), + type, argpos < n_named_args); + + /* If we're compiling a thunk, pass through invisible references + instead of making a copy. */ + if (call_from_thunk_p + || (callee_copies + && !TREE_ADDRESSABLE (type) + && (base = get_base_address (args[i].tree_value)) + && TREE_CODE (base) != SSA_NAME + && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) + { + /* We can't use sibcalls if a callee-copied argument is + stored in the current function's frame. 
*/ + if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) + *may_tailcall = false; + + args[i].tree_value = build_fold_addr_expr_loc (loc, + args[i].tree_value); + type = TREE_TYPE (args[i].tree_value); + + if (*ecf_flags & ECF_CONST) + *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); + } + else + { + /* We make a copy of the object and pass the address to the + function being called. */ + rtx copy; + + if (!COMPLETE_TYPE_P (type) + || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST + || (flag_stack_check == GENERIC_STACK_CHECK + && compare_tree_int (TYPE_SIZE_UNIT (type), + STACK_CHECK_MAX_VAR_SIZE) > 0)) + { + /* This is a variable-sized object. Make space on the stack + for it. */ + rtx size_rtx = expr_size (args[i].tree_value); + + if (*old_stack_level == 0) + { + emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX); + *old_pending_adj = pending_stack_adjust; + pending_stack_adjust = 0; + } + + copy = gen_rtx_MEM (BLKmode, + allocate_dynamic_stack_space + (size_rtx, NULL_RTX, TYPE_ALIGN (type))); + set_mem_attributes (copy, type, 1); + } + else + copy = assign_temp (type, 0, 1, 0); + + store_expr (args[i].tree_value, copy, 0, false); + + /* Just change the const function to pure and then let + the next test clear the pure based on + callee_copies. */ + if (*ecf_flags & ECF_CONST) + { + *ecf_flags &= ~ECF_CONST; + *ecf_flags |= ECF_PURE; + } + + if (!callee_copies && *ecf_flags & ECF_PURE) + *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); + + args[i].tree_value + = build_fold_addr_expr_loc (loc, make_tree (type, copy)); + type = TREE_TYPE (args[i].tree_value); + *may_tailcall = false; + } + } unsignedp = TYPE_UNSIGNED (type); mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, - fndecl ? TREE_TYPE (fndecl) : fntype, 0); + fndecl ? TREE_TYPE (fndecl) : fntype, 0); args[i].unsignedp = unsignedp; args[i].mode = mode; args[i].reg = FUNCTION_ARG (*args_so_far, mode, type, - argpos < n_named_args); + argpos < n_named_args); #ifdef FUNCTION_INCOMING_ARG /* If this is a sibling call and the machine has register windows, the - register window has to be unwinded before calling the routine, so - arguments have to go into the incoming registers. */ + register window has to be unwinded before calling the routine, so + arguments have to go into the incoming registers. */ args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type, - argpos < n_named_args); + argpos < n_named_args); #else args[i].tail_call_reg = args[i].reg; #endif if (args[i].reg) - args[i].partial - = targetm.calls.arg_partial_bytes (args_so_far, mode, type, - argpos < n_named_args); + args[i].partial + = targetm.calls.arg_partial_bytes (args_so_far, mode, type, + argpos < n_named_args); args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type); /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]), - it means that we are to pass this arg in the register(s) designated - by the PARALLEL, but also to pass it in the stack. */ + it means that we are to pass this arg in the register(s) designated + by the PARALLEL, but also to pass it in the stack. */ if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL - && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) - args[i].pass_on_stack = 1; + && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) + args[i].pass_on_stack = 1; /* If this is an addressable type, we must preallocate the stack - since we must evaluate the object into its final location. 
- - If this is to be passed in both registers and the stack, it is simpler - to preallocate. */ + since we must evaluate the object into its final location. + + If this is to be passed in both registers and the stack, it is simpler + to preallocate. */ if (TREE_ADDRESSABLE (type) - || (args[i].pass_on_stack && args[i].reg != 0)) - *must_preallocate = 1; + || (args[i].pass_on_stack && args[i].reg != 0)) + *must_preallocate = 1; /* Compute the stack-size of this argument. */ if (args[i].reg == 0 || args[i].partial != 0 - || reg_parm_stack_space > 0 - || args[i].pass_on_stack) - locate_and_pad_parm (mode, type, + || reg_parm_stack_space > 0 + || args[i].pass_on_stack) + locate_and_pad_parm (mode, type, #ifdef STACK_PARMS_IN_REG_PARM_AREA - 1, + 1, #else - args[i].reg != 0, + args[i].reg != 0, #endif - args[i].pass_on_stack ? 0 : args[i].partial, - fndecl, args_size, &args[i].locate); + args[i].pass_on_stack ? 0 : args[i].partial, + fndecl, args_size, &args[i].locate); #ifdef BLOCK_REG_PADDING else - /* The argument is passed entirely in registers. See at which - end it should be padded. */ - args[i].locate.where_pad = - BLOCK_REG_PADDING (mode, type, - int_size_in_bytes (type) <= UNITS_PER_WORD); + /* The argument is passed entirely in registers. See at which + end it should be padded. */ + args[i].locate.where_pad = + BLOCK_REG_PADDING (mode, type, + int_size_in_bytes (type) <= UNITS_PER_WORD); #endif /* Update ARGS_SIZE, the total stack space for args so far. */ args_size->constant += args[i].locate.size.constant; if (args[i].locate.size.var) - ADD_PARM_SIZE (*args_size, args[i].locate.size.var); + ADD_PARM_SIZE (*args_size, args[i].locate.size.var); /* Increment ARGS_SO_FAR, which has info about which arg-registers - have been used, etc. */ + have been used, etc. */ FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type, - argpos < n_named_args); + argpos < n_named_args); } } @@ -1202,10 +1202,10 @@ static int compute_argument_block_size (int reg_parm_stack_space, - struct args_size *args_size, - tree fndecl ATTRIBUTE_UNUSED, - tree fntype ATTRIBUTE_UNUSED, - int preferred_stack_boundary ATTRIBUTE_UNUSED) + struct args_size *args_size, + tree fndecl ATTRIBUTE_UNUSED, + tree fntype ATTRIBUTE_UNUSED, + int preferred_stack_boundary ATTRIBUTE_UNUSED) { int unadjusted_args_size = args_size->constant; @@ -1226,45 +1226,45 @@ preferred_stack_boundary /= BITS_PER_UNIT; if (preferred_stack_boundary > 1) - { - /* We don't handle this case yet. To handle it correctly we have - to add the delta, round and subtract the delta. - Currently no machine description requires this support. */ - gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1))); - args_size->var = round_up (args_size->var, preferred_stack_boundary); - } + { + /* We don't handle this case yet. To handle it correctly we have + to add the delta, round and subtract the delta. + Currently no machine description requires this support. */ + gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1))); + args_size->var = round_up (args_size->var, preferred_stack_boundary); + } if (reg_parm_stack_space > 0) - { - args_size->var - = size_binop (MAX_EXPR, args_size->var, - ssize_int (reg_parm_stack_space)); - - /* The area corresponding to register parameters is not to count in - the size of the block we need. So make the adjustment. */ - if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? 
fntype : TREE_TYPE (fndecl)))) - args_size->var - = size_binop (MINUS_EXPR, args_size->var, - ssize_int (reg_parm_stack_space)); - } + { + args_size->var + = size_binop (MAX_EXPR, args_size->var, + ssize_int (reg_parm_stack_space)); + + /* The area corresponding to register parameters is not to count in + the size of the block we need. So make the adjustment. */ + if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) + args_size->var + = size_binop (MINUS_EXPR, args_size->var, + ssize_int (reg_parm_stack_space)); + } } else { preferred_stack_boundary /= BITS_PER_UNIT; if (preferred_stack_boundary < 1) - preferred_stack_boundary = 1; + preferred_stack_boundary = 1; args_size->constant = (((args_size->constant - + stack_pointer_delta - + preferred_stack_boundary - 1) - / preferred_stack_boundary - * preferred_stack_boundary) - - stack_pointer_delta); + + stack_pointer_delta + + preferred_stack_boundary - 1) + / preferred_stack_boundary + * preferred_stack_boundary) + - stack_pointer_delta); args_size->constant = MAX (args_size->constant, - reg_parm_stack_space); + reg_parm_stack_space); if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) - args_size->constant -= reg_parm_stack_space; + args_size->constant -= reg_parm_stack_space; } return unadjusted_args_size; } @@ -1303,37 +1303,37 @@ enum machine_mode mode; if (TREE_CODE (args[i].tree_value) != CALL_EXPR) - continue; + continue; /* If this is an addressable type, we cannot pre-evaluate it. */ type = TREE_TYPE (args[i].tree_value); gcc_assert (!TREE_ADDRESSABLE (type)); args[i].initial_value = args[i].value - = expand_normal (args[i].tree_value); + = expand_normal (args[i].tree_value); mode = TYPE_MODE (type); if (mode != args[i].mode) - { - int unsignedp = args[i].unsignedp; - args[i].value - = convert_modes (args[i].mode, mode, - args[i].value, args[i].unsignedp); - - /* CSE will replace this only if it contains args[i].value - pseudo, so convert it down to the declared mode using - a SUBREG. */ - if (REG_P (args[i].value) - && GET_MODE_CLASS (args[i].mode) == MODE_INT - && promote_mode (type, mode, &unsignedp) != args[i].mode) - { - args[i].initial_value - = gen_lowpart_SUBREG (mode, args[i].value); - SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; - SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value, - args[i].unsignedp); - } - } + { + int unsignedp = args[i].unsignedp; + args[i].value + = convert_modes (args[i].mode, mode, + args[i].value, args[i].unsignedp); + + /* CSE will replace this only if it contains args[i].value + pseudo, so convert it down to the declared mode using + a SUBREG. */ + if (REG_P (args[i].value) + && GET_MODE_CLASS (args[i].mode) == MODE_INT + && promote_mode (type, mode, &unsignedp) != args[i].mode) + { + args[i].initial_value + = gen_lowpart_SUBREG (mode, args[i].value); + SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; + SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value, + args[i].unsignedp); + } + } } } @@ -1343,7 +1343,7 @@ static int finalize_must_preallocate (int must_preallocate, int num_actuals, - struct arg_data *args, struct args_size *args_size) + struct arg_data *args, struct args_size *args_size) { /* See if we have or want to preallocate stack space. @@ -1371,24 +1371,24 @@ int i; for (i = 0; i < num_actuals && ! must_preallocate; i++) - { - if (args[i].partial > 0 && ! 
args[i].pass_on_stack) - partial_seen = 1; - else if (partial_seen && args[i].reg == 0) - must_preallocate = 1; - - if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode - && (TREE_CODE (args[i].tree_value) == CALL_EXPR - || TREE_CODE (args[i].tree_value) == TARGET_EXPR - || TREE_CODE (args[i].tree_value) == COND_EXPR - || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) - copy_to_evaluate_size - += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); - } + { + if (args[i].partial > 0 && ! args[i].pass_on_stack) + partial_seen = 1; + else if (partial_seen && args[i].reg == 0) + must_preallocate = 1; + + if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode + && (TREE_CODE (args[i].tree_value) == CALL_EXPR + || TREE_CODE (args[i].tree_value) == TARGET_EXPR + || TREE_CODE (args[i].tree_value) == COND_EXPR + || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) + copy_to_evaluate_size + += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); + } if (copy_to_evaluate_size * 2 >= args_size->constant - && args_size->constant > 0) - must_preallocate = 1; + && args_size->constant > 0) + must_preallocate = 1; } return must_preallocate; } @@ -1410,87 +1410,87 @@ int i, arg_offset = 0; if (GET_CODE (argblock) == PLUS) - arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); + arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); for (i = 0; i < num_actuals; i++) - { - rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); - rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); - rtx addr; - unsigned int align, boundary; - unsigned int units_on_stack = 0; - enum machine_mode partial_mode = VOIDmode; - - /* Skip this parm if it will not be passed on the stack. */ - if (! args[i].pass_on_stack - && args[i].reg != 0 - && args[i].partial == 0) - continue; - - if (CONST_INT_P (offset)) - addr = plus_constant (arg_reg, INTVAL (offset)); - else - addr = gen_rtx_PLUS (Pmode, arg_reg, offset); - - addr = plus_constant (addr, arg_offset); - - if (args[i].partial != 0) - { - /* Only part of the parameter is being passed on the stack. - Generate a simple memory reference of the correct size. */ - units_on_stack = args[i].locate.size.constant; - partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT, - MODE_INT, 1); - args[i].stack = gen_rtx_MEM (partial_mode, addr); - set_mem_size (args[i].stack, GEN_INT (units_on_stack)); - } - else - { - args[i].stack = gen_rtx_MEM (args[i].mode, addr); - set_mem_attributes (args[i].stack, - TREE_TYPE (args[i].tree_value), 1); - } - align = BITS_PER_UNIT; - boundary = args[i].locate.boundary; - if (args[i].locate.where_pad != downward) - align = boundary; - else if (CONST_INT_P (offset)) - { - align = INTVAL (offset) * BITS_PER_UNIT | boundary; - align = align & -align; - } - set_mem_align (args[i].stack, align); - - if (CONST_INT_P (slot_offset)) - addr = plus_constant (arg_reg, INTVAL (slot_offset)); - else - addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset); - - addr = plus_constant (addr, arg_offset); - - if (args[i].partial != 0) - { - /* Only part of the parameter is being passed on the stack. - Generate a simple memory reference of the correct size. 
- */ - args[i].stack_slot = gen_rtx_MEM (partial_mode, addr); - set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack)); - } - else - { - args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr); - set_mem_attributes (args[i].stack_slot, - TREE_TYPE (args[i].tree_value), 1); - } - set_mem_align (args[i].stack_slot, args[i].locate.boundary); - - /* Function incoming arguments may overlap with sibling call - outgoing arguments and we cannot allow reordering of reads - from function arguments with stores to outgoing arguments - of sibling calls. */ - set_mem_alias_set (args[i].stack, 0); - set_mem_alias_set (args[i].stack_slot, 0); - } + { + rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); + rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); + rtx addr; + unsigned int align, boundary; + unsigned int units_on_stack = 0; + enum machine_mode partial_mode = VOIDmode; + + /* Skip this parm if it will not be passed on the stack. */ + if (! args[i].pass_on_stack + && args[i].reg != 0 + && args[i].partial == 0) + continue; + + if (CONST_INT_P (offset)) + addr = plus_constant (arg_reg, INTVAL (offset)); + else + addr = gen_rtx_PLUS (Pmode, arg_reg, offset); + + addr = plus_constant (addr, arg_offset); + + if (args[i].partial != 0) + { + /* Only part of the parameter is being passed on the stack. + Generate a simple memory reference of the correct size. */ + units_on_stack = args[i].locate.size.constant; + partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT, + MODE_INT, 1); + args[i].stack = gen_rtx_MEM (partial_mode, addr); + set_mem_size (args[i].stack, GEN_INT (units_on_stack)); + } + else + { + args[i].stack = gen_rtx_MEM (args[i].mode, addr); + set_mem_attributes (args[i].stack, + TREE_TYPE (args[i].tree_value), 1); + } + align = BITS_PER_UNIT; + boundary = args[i].locate.boundary; + if (args[i].locate.where_pad != downward) + align = boundary; + else if (CONST_INT_P (offset)) + { + align = INTVAL (offset) * BITS_PER_UNIT | boundary; + align = align & -align; + } + set_mem_align (args[i].stack, align); + + if (CONST_INT_P (slot_offset)) + addr = plus_constant (arg_reg, INTVAL (slot_offset)); + else + addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset); + + addr = plus_constant (addr, arg_offset); + + if (args[i].partial != 0) + { + /* Only part of the parameter is being passed on the stack. + Generate a simple memory reference of the correct size. + */ + args[i].stack_slot = gen_rtx_MEM (partial_mode, addr); + set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack)); + } + else + { + args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr); + set_mem_attributes (args[i].stack_slot, + TREE_TYPE (args[i].tree_value), 1); + } + set_mem_align (args[i].stack_slot, args[i].locate.boundary); + + /* Function incoming arguments may overlap with sibling call + outgoing arguments and we cannot allow reordering of reads + from function arguments with stores to outgoing arguments + of sibling calls. */ + set_mem_alias_set (args[i].stack, 0); + set_mem_alias_set (args[i].stack_slot, 0); + } } } @@ -1511,12 +1511,12 @@ if (fndecl) { /* If this is the first use of the function, see if we need to - make an external definition for it. */ + make an external definition for it. */ if (!TREE_USED (fndecl) && fndecl != current_function_decl) - { - assemble_external (fndecl); - TREE_USED (fndecl) = 1; - } + { + assemble_external (fndecl); + TREE_USED (fndecl) = 1; + } /* Get a SYMBOL_REF rtx for the function address. 
*/ funexp = XEXP (DECL_RTL (fndecl), 0); @@ -1526,7 +1526,7 @@ { push_temp_slots (); funexp = expand_normal (addr); - pop_temp_slots (); /* FUNEXP can't be BLKmode. */ + pop_temp_slots (); /* FUNEXP can't be BLKmode. */ } return funexp; } @@ -1544,13 +1544,13 @@ if (addr == crtl->args.internal_arg_pointer) i = 0; else if (GET_CODE (addr) == PLUS - && XEXP (addr, 0) == crtl->args.internal_arg_pointer - && CONST_INT_P (XEXP (addr, 1))) + && XEXP (addr, 0) == crtl->args.internal_arg_pointer + && CONST_INT_P (XEXP (addr, 1))) i = INTVAL (XEXP (addr, 1)); /* Return true for arg pointer based indexed addressing. */ else if (GET_CODE (addr) == PLUS - && (XEXP (addr, 0) == crtl->args.internal_arg_pointer - || XEXP (addr, 1) == crtl->args.internal_arg_pointer)) + && (XEXP (addr, 0) == crtl->args.internal_arg_pointer + || XEXP (addr, 1) == crtl->args.internal_arg_pointer)) return true; else return false; @@ -1563,9 +1563,9 @@ unsigned HOST_WIDE_INT k; for (k = 0; k < size; k++) - if (i + k < stored_args_map->n_bits - && TEST_BIT (stored_args_map, i + k)) - return true; + if (i + k < stored_args_map->n_bits + && TEST_BIT (stored_args_map, i + k)) + return true; } return false; @@ -1583,140 +1583,140 @@ static void load_register_parameters (struct arg_data *args, int num_actuals, - rtx *call_fusage, int flags, int is_sibcall, - int *sibcall_failure) + rtx *call_fusage, int flags, int is_sibcall, + int *sibcall_failure) { int i, j; for (i = 0; i < num_actuals; i++) { rtx reg = ((flags & ECF_SIBCALL) - ? args[i].tail_call_reg : args[i].reg); + ? args[i].tail_call_reg : args[i].reg); if (reg) - { - int partial = args[i].partial; - int nregs; - int size = 0; - rtx before_arg = get_last_insn (); - /* Set non-negative if we must move a word at a time, even if - just one word (e.g, partial == 4 && mode == DFmode). Set - to -1 if we just use a normal move insn. This value can be - zero if the argument is a zero size structure. */ - nregs = -1; - if (GET_CODE (reg) == PARALLEL) - ; - else if (partial) - { - gcc_assert (partial % UNITS_PER_WORD == 0); - nregs = partial / UNITS_PER_WORD; - } - else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) - { - size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); - nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; - } - else - size = GET_MODE_SIZE (args[i].mode); - - /* Handle calls that pass values in multiple non-contiguous - locations. The Irix 6 ABI has examples of this. */ - - if (GET_CODE (reg) == PARALLEL) - emit_group_move (reg, args[i].parallel_value); - - /* If simple case, just do move. If normal partial, store_one_arg - has already loaded the register for us. In all other cases, - load the register(s) from memory. */ - - else if (nregs == -1) - { - emit_move_insn (reg, args[i].value); + { + int partial = args[i].partial; + int nregs; + int size = 0; + rtx before_arg = get_last_insn (); + /* Set non-negative if we must move a word at a time, even if + just one word (e.g, partial == 4 && mode == DFmode). Set + to -1 if we just use a normal move insn. This value can be + zero if the argument is a zero size structure. 
*/ + nregs = -1; + if (GET_CODE (reg) == PARALLEL) + ; + else if (partial) + { + gcc_assert (partial % UNITS_PER_WORD == 0); + nregs = partial / UNITS_PER_WORD; + } + else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) + { + size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); + nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; + } + else + size = GET_MODE_SIZE (args[i].mode); + + /* Handle calls that pass values in multiple non-contiguous + locations. The Irix 6 ABI has examples of this. */ + + if (GET_CODE (reg) == PARALLEL) + emit_group_move (reg, args[i].parallel_value); + + /* If simple case, just do move. If normal partial, store_one_arg + has already loaded the register for us. In all other cases, + load the register(s) from memory. */ + + else if (nregs == -1) + { + emit_move_insn (reg, args[i].value); #ifdef BLOCK_REG_PADDING - /* Handle case where we have a value that needs shifting - up to the msb. eg. a QImode value and we're padding - upward on a BYTES_BIG_ENDIAN machine. */ - if (size < UNITS_PER_WORD - && (args[i].locate.where_pad - == (BYTES_BIG_ENDIAN ? upward : downward))) - { - rtx x; - int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; - - /* Assigning REG here rather than a temp makes CALL_FUSAGE - report the whole reg as used. Strictly speaking, the - call only uses SIZE bytes at the msb end, but it doesn't - seem worth generating rtl to say that. */ - reg = gen_rtx_REG (word_mode, REGNO (reg)); - x = expand_shift (LSHIFT_EXPR, word_mode, reg, - build_int_cst (NULL_TREE, shift), - reg, 1); - if (x != reg) - emit_move_insn (reg, x); - } + /* Handle case where we have a value that needs shifting + up to the msb. eg. a QImode value and we're padding + upward on a BYTES_BIG_ENDIAN machine. */ + if (size < UNITS_PER_WORD + && (args[i].locate.where_pad + == (BYTES_BIG_ENDIAN ? upward : downward))) + { + rtx x; + int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; + + /* Assigning REG here rather than a temp makes CALL_FUSAGE + report the whole reg as used. Strictly speaking, the + call only uses SIZE bytes at the msb end, but it doesn't + seem worth generating rtl to say that. */ + reg = gen_rtx_REG (word_mode, REGNO (reg)); + x = expand_shift (LSHIFT_EXPR, word_mode, reg, + build_int_cst (NULL_TREE, shift), + reg, 1); + if (x != reg) + emit_move_insn (reg, x); + } #endif - } - - /* If we have pre-computed the values to put in the registers in - the case of non-aligned structures, copy them in now. */ - - else if (args[i].n_aligned_regs != 0) - for (j = 0; j < args[i].n_aligned_regs; j++) - emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), - args[i].aligned_regs[j]); - - else if (partial == 0 || args[i].pass_on_stack) - { - rtx mem = validize_mem (args[i].value); - - /* Check for overlap with already clobbered argument area. */ - if (is_sibcall - && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0), - size)) - *sibcall_failure = 1; - - /* Handle a BLKmode that needs shifting. */ - if (nregs == 1 && size < UNITS_PER_WORD + } + + /* If we have pre-computed the values to put in the registers in + the case of non-aligned structures, copy them in now. */ + + else if (args[i].n_aligned_regs != 0) + for (j = 0; j < args[i].n_aligned_regs; j++) + emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), + args[i].aligned_regs[j]); + + else if (partial == 0 || args[i].pass_on_stack) + { + rtx mem = validize_mem (args[i].value); + + /* Check for overlap with already clobbered argument area. 
*/ + if (is_sibcall + && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0), + size)) + *sibcall_failure = 1; + + /* Handle a BLKmode that needs shifting. */ + if (nregs == 1 && size < UNITS_PER_WORD #ifdef BLOCK_REG_PADDING - && args[i].locate.where_pad == downward + && args[i].locate.where_pad == downward #else - && BYTES_BIG_ENDIAN + && BYTES_BIG_ENDIAN #endif - ) - { - rtx tem = operand_subword_force (mem, 0, args[i].mode); - rtx ri = gen_rtx_REG (word_mode, REGNO (reg)); - rtx x = gen_reg_rtx (word_mode); - int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; - enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR - : LSHIFT_EXPR; - - emit_move_insn (x, tem); - x = expand_shift (dir, word_mode, x, - build_int_cst (NULL_TREE, shift), - ri, 1); - if (x != ri) - emit_move_insn (ri, x); - } - else - move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); - } - - /* When a parameter is a block, and perhaps in other cases, it is - possible that it did a load from an argument slot that was - already clobbered. */ - if (is_sibcall - && check_sibcall_argument_overlap (before_arg, &args[i], 0)) - *sibcall_failure = 1; - - /* Handle calls that pass values in multiple non-contiguous - locations. The Irix 6 ABI has examples of this. */ - if (GET_CODE (reg) == PARALLEL) - use_group_regs (call_fusage, reg); - else if (nregs == -1) - use_reg (call_fusage, reg); - else if (nregs > 0) - use_regs (call_fusage, REGNO (reg), nregs); - } + ) + { + rtx tem = operand_subword_force (mem, 0, args[i].mode); + rtx ri = gen_rtx_REG (word_mode, REGNO (reg)); + rtx x = gen_reg_rtx (word_mode); + int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; + enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR + : LSHIFT_EXPR; + + emit_move_insn (x, tem); + x = expand_shift (dir, word_mode, x, + build_int_cst (NULL_TREE, shift), + ri, 1); + if (x != ri) + emit_move_insn (ri, x); + } + else + move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); + } + + /* When a parameter is a block, and perhaps in other cases, it is + possible that it did a load from an argument slot that was + already clobbered. */ + if (is_sibcall + && check_sibcall_argument_overlap (before_arg, &args[i], 0)) + *sibcall_failure = 1; + + /* Handle calls that pass values in multiple non-contiguous + locations. The Irix 6 ABI has examples of this. */ + if (GET_CODE (reg) == PARALLEL) + use_group_regs (call_fusage, reg); + else if (nregs == -1) + use_reg (call_fusage, reg); + else if (nregs > 0) + use_regs (call_fusage, REGNO (reg), nregs); + } } } @@ -1731,8 +1731,8 @@ static int combine_pending_stack_adjustment_and_call (int unadjusted_args_size, - struct args_size *args_size, - unsigned int preferred_unit_stack_boundary) + struct args_size *args_size, + unsigned int preferred_unit_stack_boundary) { /* The number of bytes to pop so that the stack will be under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ @@ -1761,9 +1761,9 @@ if (preferred_unit_stack_boundary > 1) { if (unadjusted_alignment > 0) - adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; + adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; else - adjustment += unadjusted_alignment; + adjustment += unadjusted_alignment; } /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of @@ -1796,23 +1796,23 @@ if (code == MEM) return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0), - GET_MODE_SIZE (GET_MODE (x))); + GET_MODE_SIZE (GET_MODE (x))); /* Scan all subexpressions. 
*/ fmt = GET_RTX_FORMAT (code); for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) { if (*fmt == 'e') - { - if (check_sibcall_argument_overlap_1 (XEXP (x, i))) - return 1; - } + { + if (check_sibcall_argument_overlap_1 (XEXP (x, i))) + return 1; + } else if (*fmt == 'E') - { - for (j = 0; j < XVECLEN (x, i); j++) - if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) - return 1; - } + { + for (j = 0; j < XVECLEN (x, i); j++) + if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) + return 1; + } } return 0; } @@ -1836,7 +1836,7 @@ for (; insn; insn = NEXT_INSN (insn)) if (INSN_P (insn) - && check_sibcall_argument_overlap_1 (PATTERN (insn))) + && check_sibcall_argument_overlap_1 (PATTERN (insn))) break; if (mark_stored_args_map) @@ -1848,7 +1848,7 @@ #endif for (high = low + arg->locate.size.constant; low < high; low++) - SET_BIT (stored_args_map, low); + SET_BIT (stored_args_map, low); } return insn != NULL_RTX; } @@ -1871,14 +1871,13 @@ of the MIPS port, which requires SImode values to be sign-extended when stored in 64-bit registers. */ if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab, - value, GEN_INT (shift), value, 1, OPTAB_WIDEN)) + value, GEN_INT (shift), value, 1, OPTAB_WIDEN)) gcc_unreachable (); return true; } #ifndef noCbC #include "cbc-tree.h" -#include "cbc-goto.h" #endif /* If X is a likely-spilled register value, copy it to a pseudo @@ -1894,9 +1893,9 @@ && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x)))) { /* Make sure that we generate a REG rather than a CONCAT. - Moves into CONCATs can need nontrivial instructions, - and the whole point of this function is to avoid - using the hard register directly in such a situation. */ + Moves into CONCATs can need nontrivial instructions, + and the whole point of this function is to avoid + using the hard register directly in such a situation. */ generating_concat_p = 0; new_rtx = gen_reg_rtx (GET_MODE (x)); generating_concat_p = 1; @@ -2002,7 +2001,7 @@ /* Define the boundary of the register parm stack space that needs to be saved, if any. */ int low_to_save, high_to_save; - rtx save_area = 0; /* Place that it is saved */ + rtx save_area = 0; /* Place that it is saved */ #endif int initial_highest_arg_in_use = highest_outgoing_arg_in_use; @@ -2064,25 +2063,25 @@ if ((flags & (ECF_CONST | ECF_PURE)) && (!(flags & ECF_LOOPING_CONST_OR_PURE)) && (ignore || target == const0_rtx - || TYPE_MODE (rettype) == VOIDmode)) + || TYPE_MODE (rettype) == VOIDmode)) { bool volatilep = false; tree arg; call_expr_arg_iterator iter; FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) - if (TREE_THIS_VOLATILE (arg)) - { - volatilep = true; - break; - } + if (TREE_THIS_VOLATILE (arg)) + { + volatilep = true; + break; + } if (! volatilep) - { - FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) - expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); - return const0_rtx; - } + { + FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) + expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); + return const0_rtx; + } } #ifdef REG_PARM_STACK_SPACE @@ -2103,25 +2102,25 @@ #ifdef PCC_STATIC_STRUCT_RETURN { - pcc_struct_value = 1; + pcc_struct_value = 1; } #else /* not PCC_STATIC_STRUCT_RETURN */ { - struct_value_size = int_size_in_bytes (rettype); - - if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp)) - structure_value_addr = XEXP (target, 0); - else - { - /* For variable-sized objects, we must be called with a target - specified. If we were to allocate space on the stack here, - we would have no way of knowing when to free it. 
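The overlap bookkeeping above boils down to a byte map of the incoming argument area: check_sibcall_argument_overlap marks every byte an insn stores into, and mem_overlaps_already_clobbered_arg_p rejects the sibcall if a later argument load touches a marked byte. A simplified model of that protocol, with a plain array standing in for GCC's sbitmap (the names below are illustrative, not GCC's):

    #include <stdbool.h>

    #define ARG_AREA 64                 // bytes in the incoming argument block
    static bool stored[ARG_AREA];       // stands in for stored_args_map

    // Like the SET_BIT loop: argument bytes [low, high) have been overwritten.
    static void mark_stored (int low, int high)
    {
      for (int i = low; i < high; i++)
        stored[i] = true;
    }

    // Like mem_overlaps_already_clobbered_arg_p: a read of SIZE bytes at
    // OFFSET hits an already clobbered slot, so the sibcall must be abandoned.
    static bool overlaps (int offset, int size)
    {
      for (int k = 0; k < size; k++)
        if (offset + k < ARG_AREA && stored[offset + k])
          return true;
      return false;
    }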
*/ - rtx d = assign_temp (rettype, 0, 1, 1); - - mark_temp_addr_taken (d); - structure_value_addr = XEXP (d, 0); - target = 0; - } + struct_value_size = int_size_in_bytes (rettype); + + if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp)) + structure_value_addr = XEXP (target, 0); + else + { + /* For variable-sized objects, we must be called with a target + specified. If we were to allocate space on the stack here, + we would have no way of knowing when to free it. */ + rtx d = assign_temp (rettype, 0, 1, 1); + + mark_temp_addr_taken (d); + structure_value_addr = XEXP (d, 0); + target = 0; + } } #endif /* not PCC_STATIC_STRUCT_RETURN */ } @@ -2132,14 +2131,14 @@ { struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl); /* Without automatic stack alignment, we can't increase preferred - stack boundary. With automatic stack alignment, it is - unnecessary since unless we can guarantee that all callers will - align the outgoing stack properly, callee has to align its - stack anyway. */ + stack boundary. With automatic stack alignment, it is + unnecessary since unless we can guarantee that all callers will + align the outgoing stack properly, callee has to align its + stack anyway. */ if (i - && i->preferred_incoming_stack_boundary - && i->preferred_incoming_stack_boundary < preferred_stack_boundary) - preferred_stack_boundary = i->preferred_incoming_stack_boundary; + && i->preferred_incoming_stack_boundary + && i->preferred_incoming_stack_boundary < preferred_stack_boundary) + preferred_stack_boundary = i->preferred_incoming_stack_boundary; } /* Operand 0 is a pointer-to-function; get the type of the function. */ @@ -2155,12 +2154,12 @@ call_expr_arg_iterator iter; tree arg; FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) - { - tree type = TREE_TYPE (arg); - if (type && TREE_CODE (type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (type)) - num_complex_actuals++; - } + { + tree type = TREE_TYPE (arg); + if (type && TREE_CODE (type) == COMPLEX_TYPE + && targetm.calls.split_complex_arg (type)) + num_complex_actuals++; + } type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); } else @@ -2175,20 +2174,20 @@ if (structure_value_addr && struct_value == 0) { /* If structure_value_addr is a REG other than - virtual_outgoing_args_rtx, we can use always use it. If it - is not a REG, we must always copy it into a register. - If it is virtual_outgoing_args_rtx, we must copy it to another - register in some cases. */ + virtual_outgoing_args_rtx, we can use always use it. If it + is not a REG, we must always copy it into a register. + If it is virtual_outgoing_args_rtx, we must copy it to another + register in some cases. */ rtx temp = (!REG_P (structure_value_addr) - || (ACCUMULATE_OUTGOING_ARGS - && stack_arg_under_construction - && structure_value_addr == virtual_outgoing_args_rtx) - ? copy_addr_to_reg (convert_memory_address - (Pmode, structure_value_addr)) - : structure_value_addr); + || (ACCUMULATE_OUTGOING_ARGS + && stack_arg_under_construction + && structure_value_addr == virtual_outgoing_args_rtx) + ? copy_addr_to_reg (convert_memory_address + (Pmode, structure_value_addr)) + : structure_value_addr); structure_value_addr_value = - make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); + make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); structure_value_addr_parm = 1; } @@ -2202,8 +2201,8 @@ if (type_arg_types != 0) n_named_args = (list_length (type_arg_types) - /* Count the struct value address, if it is passed as a parm. 
*/
- + structure_value_addr_parm);
+ /* Count the struct value address, if it is passed as a parm. */
+ + structure_value_addr_parm);
 else
 /* If we know nothing, treat all args as named. */
 n_named_args = num_actuals;
@@ -2238,7 +2237,7 @@
 && targetm.calls.strict_argument_naming (&args_so_far))
 ;
 else if (type_arg_types != 0
- && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
+ && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
 /* Don't include the last named arg. */
 --n_named_args;
 else
@@ -2252,20 +2251,20 @@
 /* Build up entries in the ARGS array, compute the size of the arguments
 into ARGS_SIZE, etc. */
 initialize_argument_information (num_actuals, args, &args_size,
- n_named_args, exp,
- structure_value_addr_value, fndecl, fntype,
- &args_so_far, reg_parm_stack_space,
- &old_stack_level, &old_pending_adj,
- &must_preallocate, &flags,
- &try_tail_call, CALL_FROM_THUNK_P (exp));
+ n_named_args, exp,
+ structure_value_addr_value, fndecl, fntype,
+ &args_so_far, reg_parm_stack_space,
+ &old_stack_level, &old_pending_adj,
+ &must_preallocate, &flags,
+ &try_tail_call, CALL_FROM_THUNK_P (exp));
 if (args_size.var)
 must_preallocate = 1;
 /* Now make final decision about preallocating stack space. */
 must_preallocate = finalize_must_preallocate (must_preallocate,
- num_actuals, args,
- &args_size);
+ num_actuals, args,
+ &args_size);
 /* If the structure value address will reference the stack pointer,
 we must stabilize it. We don't need to do this if we know that we are
@@ -2273,10 +2272,10 @@
 if (structure_value_addr
 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
- || reg_mentioned_p (virtual_outgoing_args_rtx,
- structure_value_addr))
+ || reg_mentioned_p (virtual_outgoing_args_rtx,
+ structure_value_addr))
 && (args_size.var
- || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
+ || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
 structure_value_addr = copy_to_reg (structure_value_addr);
 /* Tail calls can make things harder to debug, and we've traditionally
@@ -2284,8 +2283,13 @@
 expanding a call, as that means we're an argument. Don't try if
 there's cleanups, as we know there's code to follow the call. */
+ // Perform tail call optimization even without the -O2 option (code segments only).
 if (currently_expanding_call++ != 0
+#ifndef noCbC
+ || ((!fndecl || !CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl))) && !flag_optimize_sibling_calls)
+#else
 || !flag_optimize_sibling_calls
+#endif
 || args_size.var
 || dbg_cnt (tail_call) == false)
 try_tail_call = 0;
@@ -2299,39 +2303,39 @@
 #endif
 || !try_tail_call
 /* Doing sibling call optimization needs some work, since
- structure_value_addr can be allocated on the stack.
- It does not seem worth the effort since few optimizable
- sibling calls will return a structure. */
+ structure_value_addr can be allocated on the stack.
+ It does not seem worth the effort since few optimizable
+ sibling calls will return a structure. */
 || structure_value_addr != NULL_RTX
#ifdef REG_PARM_STACK_SPACE
 /* If outgoing reg parm stack space changes, we can not do sibcall. */
 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
- != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
+ != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
#endif
 /* Check whether the target is able to optimize the call
- into a sibcall. */
+ into a sibcall. */
 || !targetm.function_ok_for_sibcall (fndecl, exp)
 /* Functions that do not return exactly once may not be sibcall
- optimized. */
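The #ifndef noCbC condition above clears try_tail_call for a missing -foptimize-sibling-calls (implied by -O2) only when the callee is not a code segment: a CbC goto transfers control for good, so it must become a real jump even at -O0 or the stack would grow on every transition. A minimal sketch of the source pattern this serves, assuming CbC's __code keyword and goto-with-arguments syntax handled by the c-parser.c changes in this patch:

    __code done (int result);             // code segments never return

    __code factorial (int n, int acc)
    {
      if (n <= 1)
        goto done (acc);                  // continuation: compiled as a jump
      goto factorial (n - 1, acc * n);    // self-continuation, constant stack
    }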
+ optimized. */
 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
 /* If the called function is nested in the current one, it might access
- some of the caller's arguments, but could clobber them beforehand if
- the argument areas are shared. */
+ some of the caller's arguments, but could clobber them beforehand if
+ the argument areas are shared. */
 || (fndecl && decl_function_context (fndecl) == current_function_decl)
 /* If this function requires more stack slots than the current
- function, we cannot change it into a sibling call.
- crtl->args.pretend_args_size is not part of the
- stack allocated by our caller. */
+ function, we cannot change it into a sibling call.
+ crtl->args.pretend_args_size is not part of the
+ stack allocated by our caller. */
 || args_size.constant > (crtl->args.size
- - crtl->args.pretend_args_size)
+ - crtl->args.pretend_args_size)
 /* If the callee pops its own arguments, then it must pop exactly
- the same number of arguments as the current function. */
+ the same number of arguments as the current function. */
 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
- != RETURN_POPS_ARGS (current_function_decl,
- TREE_TYPE (current_function_decl),
- crtl->args.size))
+ != RETURN_POPS_ARGS (current_function_decl,
+ TREE_TYPE (current_function_decl),
+ crtl->args.size))
 || !lang_hooks.decls.ok_for_sibcall (fndecl))
 try_tail_call = 0;
@@ -2349,21 +2353,21 @@
 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
 caller_promoted_mode
- = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
- &caller_unsignedp,
- TREE_TYPE (current_function_decl), 1);
+ = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
+ &caller_unsignedp,
+ TREE_TYPE (current_function_decl), 1);
 callee_promoted_mode
- = promote_function_mode (TREE_TYPE (funtype), callee_mode,
- &callee_unsignedp,
- funtype, 1);
+ = promote_function_mode (TREE_TYPE (funtype), callee_mode,
+ &callee_unsignedp,
+ funtype, 1);
 if (caller_mode != VOIDmode
- && (caller_promoted_mode != callee_promoted_mode
- || ((caller_mode != caller_promoted_mode
- || callee_mode != callee_promoted_mode)
- && (caller_unsignedp != callee_unsignedp
- || GET_MODE_BITSIZE (caller_mode)
- < GET_MODE_BITSIZE (callee_mode)))))
- try_tail_call = 0;
+ && (caller_promoted_mode != callee_promoted_mode
+ || ((caller_mode != caller_promoted_mode
+ || callee_mode != callee_promoted_mode)
+ && (caller_unsignedp != callee_unsignedp
+ || GET_MODE_BITSIZE (caller_mode)
+ < GET_MODE_BITSIZE (callee_mode)))))
+ try_tail_call = 0;
 }
 /* Ensure current function's preferred stack boundary is at least
@@ -2384,27 +2388,29 @@
 {
 args_size.constant = CbC_PRETENDED_STACK_SIZE;
- return expand_cbc_goto(exp, target, fndecl, funtype, fntype,
- addr, ignore, flags, num_actuals, args, &args_size,
- args_so_far,
- old_stack_level, reg_parm_stack_space, old_pending_adj,
- preferred_stack_boundary, preferred_unit_stack_boundary,
- structure_value_addr, old_inhibit_defer_pop);
+ // Force try_tail_call so that tail call optimization is always performed.
+ // -> expand_cbc_goto is no longer needed.
+ /* return expand_cbc_goto(exp, target, fndecl, funtype, fntype,
+ * addr, ignore, flags, num_actuals, args, &args_size,
+ * args_so_far,
+ * old_stack_level, reg_parm_stack_space, old_pending_adj,
+ * preferred_stack_boundary, preferred_unit_stack_boundary,
+ * structure_value_addr, old_inhibit_defer_pop); */
 }
 else if ( CbC_IS_CbC_GOTO (exp) )
 {
 // TODO: transition from a function to a code segment.
 /* if (fndecl)
- {
- char *name_callee = IDENTIFIER_POINTER(DECL_NAME(fndecl));
- warning(0, "no warning: code segment `%s' has been called from a function.", name_callee);
+ {
+ char *name_callee = IDENTIFIER_POINTER(DECL_NAME(fndecl));
+ warning(0, "no warning: code segment `%s' has been called from a function.", name_callee);
 }
 else
- {
- warning(0, "no warning: unnamed code segment has been called from a function.");
- }
- */
+ {
+ warning(0, "no warning: unnamed code segment has been called from a function.");
+ }
+ */
 args_size.constant = CbC_PRETENDED_STACK_SIZE;
 }
 else if ( fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl)) )
@@ -2420,6 +2426,19 @@
 }
 #endif
+ // If tail call optimization was rejected for a jump to a code segment,
+ // warn about it and force the flag so the jump still becomes a tail call.
+#ifndef noCbC
+ if (fndecl && CbC_IS_CODE_SEGMENT (TREE_TYPE (fndecl)) && try_tail_call == 0)
+ {
+ location_t loc = EXPR_LOCATION (exp);
+ const char *name_callee = IDENTIFIER_POINTER (DECL_NAME (fndecl));
+ warning_at (loc, 0, "jump to code segment \"%s\" with CbC goto, but tail call optimization was not applied; forcing it",
+ name_callee);
+ try_tail_call = 1;
+ }
+#endif
+
 /* We want to make two insn chains; one for a sibling call, the
 other for a normal call. We will select one of the two chains after
 initial RTL generation is complete. */
@@ -2427,325 +2446,325 @@
 {
 int sibcall_failure = 0;
 /* We want to emit any pending stack adjustments before the tail
- recursion "call". That way we know any adjustment after the tail
- recursion call can be ignored if we indeed use the tail
- call expansion. */
+ recursion "call". That way we know any adjustment after the tail
+ recursion call can be ignored if we indeed use the tail
+ call expansion. */
 int save_pending_stack_adjust = 0;
 int save_stack_pointer_delta = 0;
 rtx insns;
 rtx before_call, next_arg_reg, after_args;
 if (pass == 0)
- {
- /* State variables we need to save and restore between
- iterations. */
- save_pending_stack_adjust = pending_stack_adjust;
- save_stack_pointer_delta = stack_pointer_delta;
- }
+ {
+ /* State variables we need to save and restore between
+ iterations. */
+ save_pending_stack_adjust = pending_stack_adjust;
+ save_stack_pointer_delta = stack_pointer_delta;
+ }
 if (pass)
- flags &= ~ECF_SIBCALL;
+ flags &= ~ECF_SIBCALL;
 else
- flags |= ECF_SIBCALL;
+ flags |= ECF_SIBCALL;
 /* Other state variables that we must reinitialize each time
- through the loop (that are not initialized by the loop itself). */
+ through the loop (that are not initialized by the loop itself). */
 argblock = 0;
 call_fusage = 0;
 /* Start a new sequence for the normal call case.
- From this point on, if the sibling call fails, we want to set
- sibcall_failure instead of continuing the loop. */
+ From this point on, if the sibling call fails, we want to set
+ sibcall_failure instead of continuing the loop. */
 start_sequence ();
 /* Don't let pending stack adjusts add up to too much.
- Also, do all pending adjustments now if there is any chance
- this might be a call to alloca or if we are expanding a sibling
- call sequence.
- Also do the adjustments before a throwing call, otherwise
- exception handling can fail; PR 19225. */
+ Also, do all pending adjustments now if there is any chance
+ this might be a call to alloca or if we are expanding a sibling
+ call sequence.
+ Also do the adjustments before a throwing call, otherwise
+ exception handling can fail; PR 19225.
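With the warning block added above, a code-segment jump whose tail call the generic legality checks rejected is still forced through try_tail_call = 1, and the user is warned instead of silently getting an ordinary call. Illustratively, the diagnostic would read as follows (file, line, and segment name are hypothetical):

    test.cbc:12:5: warning: jump to code segment "done" with CbC goto, but tail call optimization was not applied; forcing it

Forcing the flag here is what makes the separate expand_cbc_goto path unnecessary: the ordinary sibcall expansion now emits the jump.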
*/ if (pending_stack_adjust >= 32 - || (pending_stack_adjust > 0 - && (flags & ECF_MAY_BE_ALLOCA)) - || (pending_stack_adjust > 0 - && flag_exceptions && !(flags & ECF_NOTHROW)) - || pass == 0) - do_pending_stack_adjust (); + || (pending_stack_adjust > 0 + && (flags & ECF_MAY_BE_ALLOCA)) + || (pending_stack_adjust > 0 + && flag_exceptions && !(flags & ECF_NOTHROW)) + || pass == 0) + do_pending_stack_adjust (); /* Precompute any arguments as needed. */ if (pass) - precompute_arguments (num_actuals, args); + precompute_arguments (num_actuals, args); /* Now we are about to start emitting insns that can be deleted - if a libcall is deleted. */ + if a libcall is deleted. */ if (pass && (flags & ECF_MALLOC)) - start_sequence (); + start_sequence (); if (pass == 0 && crtl->stack_protect_guard) - stack_protect_epilogue (); + stack_protect_epilogue (); adjusted_args_size = args_size; /* Compute the actual size of the argument block required. The variable - and constant sizes must be combined, the size may have to be rounded, - and there may be a minimum required size. When generating a sibcall - pattern, do not round up, since we'll be re-using whatever space our - caller provided. */ + and constant sizes must be combined, the size may have to be rounded, + and there may be a minimum required size. When generating a sibcall + pattern, do not round up, since we'll be re-using whatever space our + caller provided. */ #ifndef noCbC if ( fntype && CbC_IS_CODE_SEGMENT(fntype) ) { - unadjusted_args_size = args_size.constant; - adjusted_args_size.constant = CbC_PRETENDED_STACK_SIZE; - compute_argument_block_size (reg_parm_stack_space, - &adjusted_args_size, - fndecl, fntype, - (pass == 0 ? 0 - : preferred_stack_boundary)); + unadjusted_args_size = args_size.constant; + adjusted_args_size.constant = CbC_PRETENDED_STACK_SIZE; + compute_argument_block_size (reg_parm_stack_space, + &adjusted_args_size, + fndecl, fntype, + (pass == 0 ? 0 + : preferred_stack_boundary)); } else #endif { unadjusted_args_size - = compute_argument_block_size (reg_parm_stack_space, - &adjusted_args_size, - fndecl, fntype, - (pass == 0 ? 0 - : preferred_stack_boundary)); + = compute_argument_block_size (reg_parm_stack_space, + &adjusted_args_size, + fndecl, fntype, + (pass == 0 ? 0 + : preferred_stack_boundary)); } old_stack_allocated = stack_pointer_delta - pending_stack_adjust; /* The argument block when performing a sibling call is the - incoming argument block. */ + incoming argument block. */ if (pass == 0) - { - argblock = crtl->args.internal_arg_pointer; - argblock + { + argblock = crtl->args.internal_arg_pointer; + argblock #ifdef STACK_GROWS_DOWNWARD - = plus_constant (argblock, crtl->args.pretend_args_size); + = plus_constant (argblock, crtl->args.pretend_args_size); #else - = plus_constant (argblock, -crtl->args.pretend_args_size); + = plus_constant (argblock, -crtl->args.pretend_args_size); #endif - stored_args_map = sbitmap_alloc (args_size.constant); - sbitmap_zero (stored_args_map); - } + stored_args_map = sbitmap_alloc (args_size.constant); + sbitmap_zero (stored_args_map); + } /* If we have no actual push instructions, or shouldn't use them, - make space for all args right now. */ + make space for all args right now. 
*/ else if (adjusted_args_size.var != 0) - { - if (old_stack_level == 0) - { - emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); - old_stack_pointer_delta = stack_pointer_delta; - old_pending_adj = pending_stack_adjust; - pending_stack_adjust = 0; - /* stack_arg_under_construction says whether a stack arg is - being constructed at the old stack level. Pushing the stack - gets a clean outgoing argument block. */ - old_stack_arg_under_construction = stack_arg_under_construction; - stack_arg_under_construction = 0; - } - argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); - } + { + if (old_stack_level == 0) + { + emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); + old_stack_pointer_delta = stack_pointer_delta; + old_pending_adj = pending_stack_adjust; + pending_stack_adjust = 0; + /* stack_arg_under_construction says whether a stack arg is + being constructed at the old stack level. Pushing the stack + gets a clean outgoing argument block. */ + old_stack_arg_under_construction = stack_arg_under_construction; + stack_arg_under_construction = 0; + } + argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); + } else - { - /* Note that we must go through the motions of allocating an argument - block even if the size is zero because we may be storing args - in the area reserved for register arguments, which may be part of - the stack frame. */ - - int needed = adjusted_args_size.constant; - - /* Store the maximum argument space used. It will be pushed by - the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow - checking). */ - - if (needed > crtl->outgoing_args_size) - crtl->outgoing_args_size = needed; - - if (must_preallocate) - { - if (ACCUMULATE_OUTGOING_ARGS) - { - /* Since the stack pointer will never be pushed, it is - possible for the evaluation of a parm to clobber - something we have already written to the stack. - Since most function calls on RISC machines do not use - the stack, this is uncommon, but must work correctly. - - Therefore, we save any area of the stack that was already - written and that we are using. Here we set up to do this - by making a new stack usage map from the old one. The - actual save will be done by store_one_arg. - - Another approach might be to try to reorder the argument - evaluations to avoid this conflicting stack usage. */ - - /* Since we will be writing into the entire argument area, - the map must be allocated for its entire size, not just - the part that is the responsibility of the caller. */ - if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) - needed += reg_parm_stack_space; + { + /* Note that we must go through the motions of allocating an argument + block even if the size is zero because we may be storing args + in the area reserved for register arguments, which may be part of + the stack frame. */ + + int needed = adjusted_args_size.constant; + + /* Store the maximum argument space used. It will be pushed by + the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow + checking). */ + + if (needed > crtl->outgoing_args_size) + crtl->outgoing_args_size = needed; + + if (must_preallocate) + { + if (ACCUMULATE_OUTGOING_ARGS) + { + /* Since the stack pointer will never be pushed, it is + possible for the evaluation of a parm to clobber + something we have already written to the stack. + Since most function calls on RISC machines do not use + the stack, this is uncommon, but must work correctly. 
+ + Therefore, we save any area of the stack that was already + written and that we are using. Here we set up to do this + by making a new stack usage map from the old one. The + actual save will be done by store_one_arg. + + Another approach might be to try to reorder the argument + evaluations to avoid this conflicting stack usage. */ + + /* Since we will be writing into the entire argument area, + the map must be allocated for its entire size, not just + the part that is the responsibility of the caller. */ + if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD - highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed + 1); + highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, + needed + 1); #else - highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed); + highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, + needed); #endif - if (stack_usage_map_buf) - free (stack_usage_map_buf); - stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); - stack_usage_map = stack_usage_map_buf; - - if (initial_highest_arg_in_use) - memcpy (stack_usage_map, initial_stack_usage_map, - initial_highest_arg_in_use); - - if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) - memset (&stack_usage_map[initial_highest_arg_in_use], 0, - (highest_outgoing_arg_in_use - - initial_highest_arg_in_use)); - needed = 0; - - /* The address of the outgoing argument list must not be - copied to a register here, because argblock would be left - pointing to the wrong place after the call to - allocate_dynamic_stack_space below. */ - - argblock = virtual_outgoing_args_rtx; - } - else - { - if (inhibit_defer_pop == 0) - { - /* Try to reuse some or all of the pending_stack_adjust - to get this space. */ - needed - = (combine_pending_stack_adjustment_and_call - (unadjusted_args_size, - &adjusted_args_size, - preferred_unit_stack_boundary)); - - /* combine_pending_stack_adjustment_and_call computes - an adjustment before the arguments are allocated. - Account for them and see whether or not the stack - needs to go up or down. */ - needed = unadjusted_args_size - needed; - - if (needed < 0) - { - /* We're releasing stack space. */ - /* ??? We can avoid any adjustment at all if we're - already aligned. FIXME. */ - pending_stack_adjust = -needed; - do_pending_stack_adjust (); - needed = 0; - } - else - /* We need to allocate space. We'll do that in - push_block below. */ - pending_stack_adjust = 0; - } - - /* Special case this because overhead of `push_block' in - this case is non-trivial. */ - if (needed == 0) - argblock = virtual_outgoing_args_rtx; - else - { - argblock = push_block (GEN_INT (needed), 0, 0); + if (stack_usage_map_buf) + free (stack_usage_map_buf); + stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); + stack_usage_map = stack_usage_map_buf; + + if (initial_highest_arg_in_use) + memcpy (stack_usage_map, initial_stack_usage_map, + initial_highest_arg_in_use); + + if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) + memset (&stack_usage_map[initial_highest_arg_in_use], 0, + (highest_outgoing_arg_in_use + - initial_highest_arg_in_use)); + needed = 0; + + /* The address of the outgoing argument list must not be + copied to a register here, because argblock would be left + pointing to the wrong place after the call to + allocate_dynamic_stack_space below. 
*/ + + argblock = virtual_outgoing_args_rtx; + } + else + { + if (inhibit_defer_pop == 0) + { + /* Try to reuse some or all of the pending_stack_adjust + to get this space. */ + needed + = (combine_pending_stack_adjustment_and_call + (unadjusted_args_size, + &adjusted_args_size, + preferred_unit_stack_boundary)); + + /* combine_pending_stack_adjustment_and_call computes + an adjustment before the arguments are allocated. + Account for them and see whether or not the stack + needs to go up or down. */ + needed = unadjusted_args_size - needed; + + if (needed < 0) + { + /* We're releasing stack space. */ + /* ??? We can avoid any adjustment at all if we're + already aligned. FIXME. */ + pending_stack_adjust = -needed; + do_pending_stack_adjust (); + needed = 0; + } + else + /* We need to allocate space. We'll do that in + push_block below. */ + pending_stack_adjust = 0; + } + + /* Special case this because overhead of `push_block' in + this case is non-trivial. */ + if (needed == 0) + argblock = virtual_outgoing_args_rtx; + else + { + argblock = push_block (GEN_INT (needed), 0, 0); #ifdef ARGS_GROW_DOWNWARD - argblock = plus_constant (argblock, needed); + argblock = plus_constant (argblock, needed); #endif - } - - /* We only really need to call `copy_to_reg' in the case - where push insns are going to be used to pass ARGBLOCK - to a function call in ARGS. In that case, the stack - pointer changes value from the allocation point to the - call point, and hence the value of - VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might - as well always do it. */ - argblock = copy_to_reg (argblock); - } - } - } + } + + /* We only really need to call `copy_to_reg' in the case + where push insns are going to be used to pass ARGBLOCK + to a function call in ARGS. In that case, the stack + pointer changes value from the allocation point to the + call point, and hence the value of + VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might + as well always do it. */ + argblock = copy_to_reg (argblock); + } + } + } if (ACCUMULATE_OUTGOING_ARGS) - { - /* The save/restore code in store_one_arg handles all - cases except one: a constructor call (including a C - function returning a BLKmode struct) to initialize - an argument. */ - if (stack_arg_under_construction) - { - rtx push_size - = GEN_INT (adjusted_args_size.constant - + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype - : TREE_TYPE (fndecl))) ? 0 - : reg_parm_stack_space)); - if (old_stack_level == 0) - { - emit_stack_save (SAVE_BLOCK, &old_stack_level, - NULL_RTX); - old_stack_pointer_delta = stack_pointer_delta; - old_pending_adj = pending_stack_adjust; - pending_stack_adjust = 0; - /* stack_arg_under_construction says whether a stack - arg is being constructed at the old stack level. - Pushing the stack gets a clean outgoing argument - block. */ - old_stack_arg_under_construction - = stack_arg_under_construction; - stack_arg_under_construction = 0; - /* Make a new map for the new argument list. */ - if (stack_usage_map_buf) - free (stack_usage_map_buf); - stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); - stack_usage_map = stack_usage_map_buf; - highest_outgoing_arg_in_use = 0; - } - allocate_dynamic_stack_space (push_size, NULL_RTX, - BITS_PER_UNIT); - } - - /* If argument evaluation might modify the stack pointer, - copy the address of the argument list to a register. 
*/ - for (i = 0; i < num_actuals; i++) - if (args[i].pass_on_stack) - { - argblock = copy_addr_to_reg (argblock); - break; - } - } + { + /* The save/restore code in store_one_arg handles all + cases except one: a constructor call (including a C + function returning a BLKmode struct) to initialize + an argument. */ + if (stack_arg_under_construction) + { + rtx push_size + = GEN_INT (adjusted_args_size.constant + + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype + : TREE_TYPE (fndecl))) ? 0 + : reg_parm_stack_space)); + if (old_stack_level == 0) + { + emit_stack_save (SAVE_BLOCK, &old_stack_level, + NULL_RTX); + old_stack_pointer_delta = stack_pointer_delta; + old_pending_adj = pending_stack_adjust; + pending_stack_adjust = 0; + /* stack_arg_under_construction says whether a stack + arg is being constructed at the old stack level. + Pushing the stack gets a clean outgoing argument + block. */ + old_stack_arg_under_construction + = stack_arg_under_construction; + stack_arg_under_construction = 0; + /* Make a new map for the new argument list. */ + if (stack_usage_map_buf) + free (stack_usage_map_buf); + stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); + stack_usage_map = stack_usage_map_buf; + highest_outgoing_arg_in_use = 0; + } + allocate_dynamic_stack_space (push_size, NULL_RTX, + BITS_PER_UNIT); + } + + /* If argument evaluation might modify the stack pointer, + copy the address of the argument list to a register. */ + for (i = 0; i < num_actuals; i++) + if (args[i].pass_on_stack) + { + argblock = copy_addr_to_reg (argblock); + break; + } + } compute_argument_addresses (args, argblock, num_actuals); /* If we push args individually in reverse order, perform stack alignment - before the first push (the last arg). */ + before the first push (the last arg). */ if (PUSH_ARGS_REVERSED && argblock == 0 - && adjusted_args_size.constant != unadjusted_args_size) - { - /* When the stack adjustment is pending, we get better code - by combining the adjustments. */ - if (pending_stack_adjust - && ! inhibit_defer_pop) - { - pending_stack_adjust - = (combine_pending_stack_adjustment_and_call - (unadjusted_args_size, - &adjusted_args_size, - preferred_unit_stack_boundary)); - do_pending_stack_adjust (); - } - else if (argblock == 0) - anti_adjust_stack (GEN_INT (adjusted_args_size.constant - - unadjusted_args_size)); - } + && adjusted_args_size.constant != unadjusted_args_size) + { + /* When the stack adjustment is pending, we get better code + by combining the adjustments. */ + if (pending_stack_adjust + && ! inhibit_defer_pop) + { + pending_stack_adjust + = (combine_pending_stack_adjustment_and_call + (unadjusted_args_size, + &adjusted_args_size, + preferred_unit_stack_boundary)); + do_pending_stack_adjust (); + } + else if (argblock == 0) + anti_adjust_stack (GEN_INT (adjusted_args_size.constant + - unadjusted_args_size)); + } /* Now that the stack is properly aligned, pops can't safely - be deferred during the evaluation of the arguments. */ + be deferred during the evaluation of the arguments. */ NO_DEFER_POP; funexp = rtx_for_function_call (fndecl, addr); @@ -2753,472 +2772,472 @@ /* Figure out the register where the value, if any, will come back. */ valreg = 0; if (TYPE_MODE (rettype) != VOIDmode - && ! 
structure_value_addr) - { - if (pcc_struct_value) - valreg = hard_function_value (build_pointer_type (rettype), - fndecl, NULL, (pass == 0)); - else - valreg = hard_function_value (rettype, fndecl, fntype, - (pass == 0)); - - /* If VALREG is a PARALLEL whose first member has a zero - offset, use that. This is for targets such as m68k that - return the same value in multiple places. */ - if (GET_CODE (valreg) == PARALLEL) - { - rtx elem = XVECEXP (valreg, 0, 0); - rtx where = XEXP (elem, 0); - rtx offset = XEXP (elem, 1); - if (offset == const0_rtx - && GET_MODE (where) == GET_MODE (valreg)) - valreg = where; - } - } + && ! structure_value_addr) + { + if (pcc_struct_value) + valreg = hard_function_value (build_pointer_type (rettype), + fndecl, NULL, (pass == 0)); + else + valreg = hard_function_value (rettype, fndecl, fntype, + (pass == 0)); + + /* If VALREG is a PARALLEL whose first member has a zero + offset, use that. This is for targets such as m68k that + return the same value in multiple places. */ + if (GET_CODE (valreg) == PARALLEL) + { + rtx elem = XVECEXP (valreg, 0, 0); + rtx where = XEXP (elem, 0); + rtx offset = XEXP (elem, 1); + if (offset == const0_rtx + && GET_MODE (where) == GET_MODE (valreg)) + valreg = where; + } + } /* Precompute all register parameters. It isn't safe to compute anything - once we have started filling any specific hard regs. */ + once we have started filling any specific hard regs. */ precompute_register_parameters (num_actuals, args, ®_parm_seen); if (CALL_EXPR_STATIC_CHAIN (exp)) - static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); + static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); else - static_chain_value = 0; + static_chain_value = 0; #ifdef REG_PARM_STACK_SPACE /* Save the fixed argument area if it's part of the caller's frame and - is clobbered by argument setup for this call. */ + is clobbered by argument setup for this call. */ if (ACCUMULATE_OUTGOING_ARGS && pass) - save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, - &low_to_save, &high_to_save); + save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, + &low_to_save, &high_to_save); #endif /* Now store (and compute if necessary) all non-register parms. - These come before register parms, since they can require block-moves, - which could clobber the registers used for register parms. - Parms which have partial registers are not stored here, - but we do preallocate space here if they want that. */ + These come before register parms, since they can require block-moves, + which could clobber the registers used for register parms. + Parms which have partial registers are not stored here, + but we do preallocate space here if they want that. 
*/ for (i = 0; i < num_actuals; i++) - { - if (args[i].reg == 0 || args[i].pass_on_stack) - { - rtx before_arg = get_last_insn (); - - if (store_one_arg (&args[i], argblock, flags, - adjusted_args_size.var != 0, - reg_parm_stack_space) - || (pass == 0 - && check_sibcall_argument_overlap (before_arg, - &args[i], 1))) - sibcall_failure = 1; - } - - if (((flags & ECF_CONST) - || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) - && args[i].stack) - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_USE (VOIDmode, - args[i].stack), - call_fusage); - } + { + if (args[i].reg == 0 || args[i].pass_on_stack) + { + rtx before_arg = get_last_insn (); + + if (store_one_arg (&args[i], argblock, flags, + adjusted_args_size.var != 0, + reg_parm_stack_space) + || (pass == 0 + && check_sibcall_argument_overlap (before_arg, + &args[i], 1))) + sibcall_failure = 1; + } + + if (((flags & ECF_CONST) + || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) + && args[i].stack) + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_USE (VOIDmode, + args[i].stack), + call_fusage); + } /* If we have a parm that is passed in registers but not in memory - and whose alignment does not permit a direct copy into registers, - make a group of pseudos that correspond to each register that we - will later fill. */ + and whose alignment does not permit a direct copy into registers, + make a group of pseudos that correspond to each register that we + will later fill. */ if (STRICT_ALIGNMENT) - store_unaligned_arguments_into_pseudos (args, num_actuals); + store_unaligned_arguments_into_pseudos (args, num_actuals); /* Now store any partially-in-registers parm. - This is the last place a block-move can happen. */ + This is the last place a block-move can happen. */ if (reg_parm_seen) - for (i = 0; i < num_actuals; i++) - if (args[i].partial != 0 && ! args[i].pass_on_stack) - { - rtx before_arg = get_last_insn (); - - if (store_one_arg (&args[i], argblock, flags, - adjusted_args_size.var != 0, - reg_parm_stack_space) - || (pass == 0 - && check_sibcall_argument_overlap (before_arg, - &args[i], 1))) - sibcall_failure = 1; - } + for (i = 0; i < num_actuals; i++) + if (args[i].partial != 0 && ! args[i].pass_on_stack) + { + rtx before_arg = get_last_insn (); + + if (store_one_arg (&args[i], argblock, flags, + adjusted_args_size.var != 0, + reg_parm_stack_space) + || (pass == 0 + && check_sibcall_argument_overlap (before_arg, + &args[i], 1))) + sibcall_failure = 1; + } /* If we pushed args in forward order, perform stack alignment - after pushing the last arg. */ + after pushing the last arg. */ if (!PUSH_ARGS_REVERSED && argblock == 0) - anti_adjust_stack (GEN_INT (adjusted_args_size.constant - - unadjusted_args_size)); + anti_adjust_stack (GEN_INT (adjusted_args_size.constant + - unadjusted_args_size)); /* If register arguments require space on the stack and stack space - was not preallocated, allocate stack space here for arguments - passed in registers. */ + was not preallocated, allocate stack space here for arguments + passed in registers. */ if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) && !ACCUMULATE_OUTGOING_ARGS - && must_preallocate == 0 && reg_parm_stack_space > 0) - anti_adjust_stack (GEN_INT (reg_parm_stack_space)); + && must_preallocate == 0 && reg_parm_stack_space > 0) + anti_adjust_stack (GEN_INT (reg_parm_stack_space)); /* Pass the function the address in which to return a - structure value. */ + structure value. */ if (pass != 0 && structure_value_addr && ! 
structure_value_addr_parm) - { - structure_value_addr - = convert_memory_address (Pmode, structure_value_addr); - emit_move_insn (struct_value, - force_reg (Pmode, - force_operand (structure_value_addr, - NULL_RTX))); - - if (REG_P (struct_value)) - use_reg (&call_fusage, struct_value); - } + { + structure_value_addr + = convert_memory_address (Pmode, structure_value_addr); + emit_move_insn (struct_value, + force_reg (Pmode, + force_operand (structure_value_addr, + NULL_RTX))); + + if (REG_P (struct_value)) + use_reg (&call_fusage, struct_value); + } after_args = get_last_insn (); funexp = prepare_call_address (fndecl, funexp, static_chain_value, - &call_fusage, reg_parm_seen, pass == 0); + &call_fusage, reg_parm_seen, pass == 0); load_register_parameters (args, num_actuals, &call_fusage, flags, - pass == 0, &sibcall_failure); + pass == 0, &sibcall_failure); /* Save a pointer to the last insn before the call, so that we can - later safely search backwards to find the CALL_INSN. */ + later safely search backwards to find the CALL_INSN. */ before_call = get_last_insn (); /* Set up next argument register. For sibling calls on machines - with register windows this should be the incoming register. */ + with register windows this should be the incoming register. */ #ifdef FUNCTION_INCOMING_ARG if (pass == 0) - next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode, - void_type_node, 1); + next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode, + void_type_node, 1); else #endif - next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, - void_type_node, 1); + next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, + void_type_node, 1); /* All arguments and registers used for the call must be set up by - now! */ + now! */ /* Stack must be properly aligned now. */ gcc_assert (!pass - || !(stack_pointer_delta % preferred_unit_stack_boundary)); + || !(stack_pointer_delta % preferred_unit_stack_boundary)); /* Generate the actual call instruction. */ emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, - adjusted_args_size.constant, struct_value_size, - next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, - flags, & args_so_far); + adjusted_args_size.constant, struct_value_size, + next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, + flags, & args_so_far); /* If the call setup or the call itself overlaps with anything - of the argument setup we probably clobbered our call address. - In that case we can't do sibcalls. */ + of the argument setup we probably clobbered our call address. + In that case we can't do sibcalls. */ if (pass == 0 - && check_sibcall_argument_overlap (after_args, 0, 0)) - sibcall_failure = 1; + && check_sibcall_argument_overlap (after_args, 0, 0)) + sibcall_failure = 1; /* If a non-BLKmode value is returned at the most significant end - of a register, shift the register right by the appropriate amount - and update VALREG accordingly. BLKmode values are handled by the - group load/store machinery below. */ + of a register, shift the register right by the appropriate amount + and update VALREG accordingly. BLKmode values are handled by the + group load/store machinery below. 
*/ if (!structure_value_addr - && !pcc_struct_value - && TYPE_MODE (rettype) != BLKmode - && targetm.calls.return_in_msb (rettype)) - { - if (shift_return_value (TYPE_MODE (rettype), false, valreg)) - sibcall_failure = 1; - valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); - } + && !pcc_struct_value + && TYPE_MODE (rettype) != BLKmode + && targetm.calls.return_in_msb (rettype)) + { + if (shift_return_value (TYPE_MODE (rettype), false, valreg)) + sibcall_failure = 1; + valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); + } if (pass && (flags & ECF_MALLOC)) - { - rtx temp = gen_reg_rtx (GET_MODE (valreg)); - rtx last, insns; - - /* The return value from a malloc-like function is a pointer. */ - if (TREE_CODE (rettype) == POINTER_TYPE) - mark_reg_pointer (temp, BIGGEST_ALIGNMENT); - - emit_move_insn (temp, valreg); - - /* The return value from a malloc-like function can not alias - anything else. */ - last = get_last_insn (); - add_reg_note (last, REG_NOALIAS, temp); - - /* Write out the sequence. */ - insns = get_insns (); - end_sequence (); - emit_insn (insns); - valreg = temp; - } + { + rtx temp = gen_reg_rtx (GET_MODE (valreg)); + rtx last, insns; + + /* The return value from a malloc-like function is a pointer. */ + if (TREE_CODE (rettype) == POINTER_TYPE) + mark_reg_pointer (temp, BIGGEST_ALIGNMENT); + + emit_move_insn (temp, valreg); + + /* The return value from a malloc-like function can not alias + anything else. */ + last = get_last_insn (); + add_reg_note (last, REG_NOALIAS, temp); + + /* Write out the sequence. */ + insns = get_insns (); + end_sequence (); + emit_insn (insns); + valreg = temp; + } /* For calls to `setjmp', etc., inform - function.c:setjmp_warnings that it should complain if - nonvolatile values are live. For functions that cannot - return, inform flow that control does not fall through. */ + function.c:setjmp_warnings that it should complain if + nonvolatile values are live. For functions that cannot + return, inform flow that control does not fall through. */ if ((flags & ECF_NORETURN) || pass == 0) - { - /* The barrier must be emitted - immediately after the CALL_INSN. Some ports emit more - than just a CALL_INSN above, so we must search for it here. */ - - rtx last = get_last_insn (); - while (!CALL_P (last)) - { - last = PREV_INSN (last); - /* There was no CALL_INSN? */ - gcc_assert (last != before_call); - } - - emit_barrier_after (last); - - /* Stack adjustments after a noreturn call are dead code. - However when NO_DEFER_POP is in effect, we must preserve - stack_pointer_delta. */ - if (inhibit_defer_pop == 0) - { - stack_pointer_delta = old_stack_allocated; - pending_stack_adjust = 0; - } - } + { + /* The barrier must be emitted + immediately after the CALL_INSN. Some ports emit more + than just a CALL_INSN above, so we must search for it here. */ + + rtx last = get_last_insn (); + while (!CALL_P (last)) + { + last = PREV_INSN (last); + /* There was no CALL_INSN? */ + gcc_assert (last != before_call); + } + + emit_barrier_after (last); + + /* Stack adjustments after a noreturn call are dead code. + However when NO_DEFER_POP is in effect, we must preserve + stack_pointer_delta. */ + if (inhibit_defer_pop == 0) + { + stack_pointer_delta = old_stack_allocated; + pending_stack_adjust = 0; + } + } /* If value type not void, return an rtx for the value. 
*/ if (TYPE_MODE (rettype) == VOIDmode - || ignore) - target = const0_rtx; + || ignore) + target = const0_rtx; else if (structure_value_addr) - { - if (target == 0 || !MEM_P (target)) - { - target - = gen_rtx_MEM (TYPE_MODE (rettype), - memory_address (TYPE_MODE (rettype), - structure_value_addr)); - set_mem_attributes (target, rettype, 1); - } - } + { + if (target == 0 || !MEM_P (target)) + { + target + = gen_rtx_MEM (TYPE_MODE (rettype), + memory_address (TYPE_MODE (rettype), + structure_value_addr)); + set_mem_attributes (target, rettype, 1); + } + } else if (pcc_struct_value) - { - /* This is the special C++ case where we need to - know what the true target was. We take care to - never use this value more than once in one expression. */ - target = gen_rtx_MEM (TYPE_MODE (rettype), - copy_to_reg (valreg)); - set_mem_attributes (target, rettype, 1); - } + { + /* This is the special C++ case where we need to + know what the true target was. We take care to + never use this value more than once in one expression. */ + target = gen_rtx_MEM (TYPE_MODE (rettype), + copy_to_reg (valreg)); + set_mem_attributes (target, rettype, 1); + } /* Handle calls that return values in multiple non-contiguous locations. - The Irix 6 ABI has examples of this. */ + The Irix 6 ABI has examples of this. */ else if (GET_CODE (valreg) == PARALLEL) - { - if (target == 0) - { - /* This will only be assigned once, so it can be readonly. */ - tree nt = build_qualified_type (rettype, - (TYPE_QUALS (rettype) - | TYPE_QUAL_CONST)); - - target = assign_temp (nt, 0, 1, 1); - } - - if (! rtx_equal_p (target, valreg)) - emit_group_store (target, valreg, rettype, - int_size_in_bytes (rettype)); - - /* We can not support sibling calls for this case. */ - sibcall_failure = 1; - } + { + if (target == 0) + { + /* This will only be assigned once, so it can be readonly. */ + tree nt = build_qualified_type (rettype, + (TYPE_QUALS (rettype) + | TYPE_QUAL_CONST)); + + target = assign_temp (nt, 0, 1, 1); + } + + if (! rtx_equal_p (target, valreg)) + emit_group_store (target, valreg, rettype, + int_size_in_bytes (rettype)); + + /* We can not support sibling calls for this case. */ + sibcall_failure = 1; + } else if (target - && GET_MODE (target) == TYPE_MODE (rettype) - && GET_MODE (target) == GET_MODE (valreg)) - { - bool may_overlap = false; - - /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard - reg to a plain register. */ - if (!REG_P (target) || HARD_REGISTER_P (target)) - valreg = avoid_likely_spilled_reg (valreg); - - /* If TARGET is a MEM in the argument area, and we have - saved part of the argument area, then we can't store - directly into TARGET as it may get overwritten when we - restore the argument save area below. Don't work too - hard though and simply force TARGET to a register if it - is a MEM; the optimizer is quite likely to sort it out. */ - if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) - for (i = 0; i < num_actuals; i++) - if (args[i].save_area) - { - may_overlap = true; - break; - } - - if (may_overlap) - target = copy_to_reg (valreg); - else - { - /* TARGET and VALREG cannot be equal at this point - because the latter would not have - REG_FUNCTION_VALUE_P true, while the former would if - it were referring to the same register. - - If they refer to the same register, this move will be - a no-op, except when function inlining is being - done. */ - emit_move_insn (target, valreg); - - /* If we are setting a MEM, this code must be executed. 
- Since it is emitted after the call insn, sibcall - optimization cannot be performed in that case. */ - if (MEM_P (target)) - sibcall_failure = 1; - } - } + && GET_MODE (target) == TYPE_MODE (rettype) + && GET_MODE (target) == GET_MODE (valreg)) + { + bool may_overlap = false; + + /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard + reg to a plain register. */ + if (!REG_P (target) || HARD_REGISTER_P (target)) + valreg = avoid_likely_spilled_reg (valreg); + + /* If TARGET is a MEM in the argument area, and we have + saved part of the argument area, then we can't store + directly into TARGET as it may get overwritten when we + restore the argument save area below. Don't work too + hard though and simply force TARGET to a register if it + is a MEM; the optimizer is quite likely to sort it out. */ + if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) + for (i = 0; i < num_actuals; i++) + if (args[i].save_area) + { + may_overlap = true; + break; + } + + if (may_overlap) + target = copy_to_reg (valreg); + else + { + /* TARGET and VALREG cannot be equal at this point + because the latter would not have + REG_FUNCTION_VALUE_P true, while the former would if + it were referring to the same register. + + If they refer to the same register, this move will be + a no-op, except when function inlining is being + done. */ + emit_move_insn (target, valreg); + + /* If we are setting a MEM, this code must be executed. + Since it is emitted after the call insn, sibcall + optimization cannot be performed in that case. */ + if (MEM_P (target)) + sibcall_failure = 1; + } + } else if (TYPE_MODE (rettype) == BLKmode) - { - rtx val = valreg; - if (GET_MODE (val) != BLKmode) - val = avoid_likely_spilled_reg (val); - target = copy_blkmode_from_reg (target, val, rettype); - - /* We can not support sibling calls for this case. */ - sibcall_failure = 1; - } + { + rtx val = valreg; + if (GET_MODE (val) != BLKmode) + val = avoid_likely_spilled_reg (val); + target = copy_blkmode_from_reg (target, val, rettype); + + /* We can not support sibling calls for this case. */ + sibcall_failure = 1; + } else - target = copy_to_reg (avoid_likely_spilled_reg (valreg)); + target = copy_to_reg (avoid_likely_spilled_reg (valreg)); /* If we promoted this return value, make the proper SUBREG. TARGET might be const0_rtx here, so be careful. */ if (REG_P (target) - && TYPE_MODE (rettype) != BLKmode - && GET_MODE (target) != TYPE_MODE (rettype)) - { - tree type = rettype; - int unsignedp = TYPE_UNSIGNED (type); - int offset = 0; - enum machine_mode pmode; - - /* Ensure we promote as expected, and get the new unsignedness. */ - pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, - funtype, 1); - gcc_assert (GET_MODE (target) == pmode); - - if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) - && (GET_MODE_SIZE (GET_MODE (target)) - > GET_MODE_SIZE (TYPE_MODE (type)))) - { - offset = GET_MODE_SIZE (GET_MODE (target)) - - GET_MODE_SIZE (TYPE_MODE (type)); - if (! BYTES_BIG_ENDIAN) - offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; - else if (! WORDS_BIG_ENDIAN) - offset %= UNITS_PER_WORD; - } - - target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); - SUBREG_PROMOTED_VAR_P (target) = 1; - SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); - } + && TYPE_MODE (rettype) != BLKmode + && GET_MODE (target) != TYPE_MODE (rettype)) + { + tree type = rettype; + int unsignedp = TYPE_UNSIGNED (type); + int offset = 0; + enum machine_mode pmode; + + /* Ensure we promote as expected, and get the new unsignedness. 
*/ + pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, + funtype, 1); + gcc_assert (GET_MODE (target) == pmode); + + if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) + && (GET_MODE_SIZE (GET_MODE (target)) + > GET_MODE_SIZE (TYPE_MODE (type)))) + { + offset = GET_MODE_SIZE (GET_MODE (target)) + - GET_MODE_SIZE (TYPE_MODE (type)); + if (! BYTES_BIG_ENDIAN) + offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; + else if (! WORDS_BIG_ENDIAN) + offset %= UNITS_PER_WORD; + } + + target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); + SUBREG_PROMOTED_VAR_P (target) = 1; + SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); + } /* If size of args is variable or this was a constructor call for a stack - argument, restore saved stack-pointer value. */ + argument, restore saved stack-pointer value. */ if (old_stack_level) - { - emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); - stack_pointer_delta = old_stack_pointer_delta; - pending_stack_adjust = old_pending_adj; - old_stack_allocated = stack_pointer_delta - pending_stack_adjust; - stack_arg_under_construction = old_stack_arg_under_construction; - highest_outgoing_arg_in_use = initial_highest_arg_in_use; - stack_usage_map = initial_stack_usage_map; - sibcall_failure = 1; - } + { + emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); + stack_pointer_delta = old_stack_pointer_delta; + pending_stack_adjust = old_pending_adj; + old_stack_allocated = stack_pointer_delta - pending_stack_adjust; + stack_arg_under_construction = old_stack_arg_under_construction; + highest_outgoing_arg_in_use = initial_highest_arg_in_use; + stack_usage_map = initial_stack_usage_map; + sibcall_failure = 1; + } else if (ACCUMULATE_OUTGOING_ARGS && pass) - { + { #ifdef REG_PARM_STACK_SPACE - if (save_area) - restore_fixed_argument_area (save_area, argblock, - high_to_save, low_to_save); + if (save_area) + restore_fixed_argument_area (save_area, argblock, + high_to_save, low_to_save); #endif - /* If we saved any argument areas, restore them. */ - for (i = 0; i < num_actuals; i++) - if (args[i].save_area) - { - enum machine_mode save_mode = GET_MODE (args[i].save_area); - rtx stack_area - = gen_rtx_MEM (save_mode, - memory_address (save_mode, - XEXP (args[i].stack_slot, 0))); - - if (save_mode != BLKmode) - emit_move_insn (stack_area, args[i].save_area); - else - emit_block_move (stack_area, args[i].save_area, - GEN_INT (args[i].locate.size.constant), - BLOCK_OP_CALL_PARM); - } - - highest_outgoing_arg_in_use = initial_highest_arg_in_use; - stack_usage_map = initial_stack_usage_map; - } + /* If we saved any argument areas, restore them. */ + for (i = 0; i < num_actuals; i++) + if (args[i].save_area) + { + enum machine_mode save_mode = GET_MODE (args[i].save_area); + rtx stack_area + = gen_rtx_MEM (save_mode, + memory_address (save_mode, + XEXP (args[i].stack_slot, 0))); + + if (save_mode != BLKmode) + emit_move_insn (stack_area, args[i].save_area); + else + emit_block_move (stack_area, args[i].save_area, + GEN_INT (args[i].locate.size.constant), + BLOCK_OP_CALL_PARM); + } + + highest_outgoing_arg_in_use = initial_highest_arg_in_use; + stack_usage_map = initial_stack_usage_map; + } /* If this was alloca, record the new stack level for nonlocal gotos. - Check for the handler slots since we might not have a save area - for non-local gotos. */ + Check for the handler slots since we might not have a save area + for non-local gotos. 
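The promote_function_mode hunk above computes where a narrow return value sits inside its wider promoted register before wrapping it in a SUBREG. A standalone sketch of that offset arithmetic; the sizes and endianness flags below are example values, not any real target's macros:

    #include <stdio.h>

    #define TARGET_SIZE 8      /* GET_MODE_SIZE (GET_MODE (target)) */
    #define VALUE_SIZE  2      /* GET_MODE_SIZE (TYPE_MODE (type)) */
    #define UNITS_PER_WORD 4

    int
    main (void)
    {
      int words_big_endian = 1, bytes_big_endian = 1;
      int offset = 0;

      if ((words_big_endian || bytes_big_endian)
          && TARGET_SIZE > VALUE_SIZE)
        {
          offset = TARGET_SIZE - VALUE_SIZE;
          if (!bytes_big_endian)
            offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
          else if (!words_big_endian)
            offset %= UNITS_PER_WORD;
        }
      /* Fully big-endian: 6; big-endian words with little-endian
         bytes: 4; fully little-endian targets never enter the if.  */
      printf ("subreg offset = %d\n", offset);
      return 0;
    }
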
*/ if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0) - update_nonlocal_goto_save_area (); + update_nonlocal_goto_save_area (); /* Free up storage we no longer need. */ for (i = 0; i < num_actuals; ++i) - if (args[i].aligned_regs) - free (args[i].aligned_regs); + if (args[i].aligned_regs) + free (args[i].aligned_regs); insns = get_insns (); end_sequence (); if (pass == 0) - { - tail_call_insns = insns; - - /* Restore the pending stack adjustment now that we have - finished generating the sibling call sequence. */ - - pending_stack_adjust = save_pending_stack_adjust; - stack_pointer_delta = save_stack_pointer_delta; - - /* Prepare arg structure for next iteration. */ - for (i = 0; i < num_actuals; i++) - { - args[i].value = 0; - args[i].aligned_regs = 0; - args[i].stack = 0; - } - - sbitmap_free (stored_args_map); - } + { + tail_call_insns = insns; + + /* Restore the pending stack adjustment now that we have + finished generating the sibling call sequence. */ + + pending_stack_adjust = save_pending_stack_adjust; + stack_pointer_delta = save_stack_pointer_delta; + + /* Prepare arg structure for next iteration. */ + for (i = 0; i < num_actuals; i++) + { + args[i].value = 0; + args[i].aligned_regs = 0; + args[i].stack = 0; + } + + sbitmap_free (stored_args_map); + } else - { - normal_call_insns = insns; - - /* Verify that we've deallocated all the stack we used. */ - gcc_assert ((flags & ECF_NORETURN) - || (old_stack_allocated - == stack_pointer_delta - pending_stack_adjust)); - } + { + normal_call_insns = insns; + + /* Verify that we've deallocated all the stack we used. */ + gcc_assert ((flags & ECF_NORETURN) + || (old_stack_allocated + == stack_pointer_delta - pending_stack_adjust)); + } /* If something prevents making this a sibling call, - zero out the sequence. */ + zero out the sequence. */ if (sibcall_failure) - tail_call_insns = NULL_RTX; + tail_call_insns = NULL_RTX; else - break; + break; } /* If tail call production succeeded, we need to remove REG_EQUIV notes on @@ -3261,14 +3280,14 @@ rtx note; /* There are never REG_EQUIV notes for the incoming arguments - after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ + after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ if (NOTE_P (insn) - && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) - break; + && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) + break; note = find_reg_note (insn, REG_EQUIV, 0); if (note) - remove_note (insn, note); + remove_note (insn, note); note = find_reg_note (insn, REG_EQUIV, 0); gcc_assert (!note); } @@ -3286,8 +3305,8 @@ { tree type = TREE_VALUE (p); if (TREE_CODE (type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (type)) - goto found; + && targetm.calls.split_complex_arg (type)) + goto found; } return types; @@ -3299,22 +3318,22 @@ tree complex_type = TREE_VALUE (p); if (TREE_CODE (complex_type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (complex_type)) - { - tree next, imag; - - /* Rewrite complex type with component type. */ - TREE_VALUE (p) = TREE_TYPE (complex_type); - next = TREE_CHAIN (p); - - /* Add another component type for the imaginary part. */ - imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); - TREE_CHAIN (p) = imag; - TREE_CHAIN (imag) = next; - - /* Skip the newly created node. */ - p = TREE_CHAIN (p); - } + && targetm.calls.split_complex_arg (complex_type)) + { + tree next, imag; + + /* Rewrite complex type with component type. 
*/ + TREE_VALUE (p) = TREE_TYPE (complex_type); + next = TREE_CHAIN (p); + + /* Add another component type for the imaginary part. */ + imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); + TREE_CHAIN (p) = imag; + TREE_CHAIN (imag) = next; + + /* Skip the newly created node. */ + p = TREE_CHAIN (p); + } } return types; @@ -3326,8 +3345,8 @@ static rtx emit_library_call_value_1 (int retval, rtx orgfun, rtx value, - enum libcall_type fn_type, - enum machine_mode outmode, int nargs, va_list p) + enum libcall_type fn_type, + enum machine_mode outmode, int nargs, va_list p) { /* Total size in bytes of all the stack-parms scanned so far. */ struct args_size args_size; @@ -3363,7 +3382,7 @@ int reg_parm_stack_space = 0; int needed; rtx before_call; - tree tfom; /* type_for_mode (outmode, 0) */ + tree tfom; /* type_for_mode (outmode, 0) */ #ifdef REG_PARM_STACK_SPACE /* Define the boundary of the register parm stack space that needs to be @@ -3419,24 +3438,24 @@ { tfom = lang_hooks.types.type_for_mode (outmode, 0); if (aggregate_value_p (tfom, 0)) - { + { #ifdef PCC_STATIC_STRUCT_RETURN - rtx pointer_reg - = hard_function_value (build_pointer_type (tfom), 0, 0, 0); - mem_value = gen_rtx_MEM (outmode, pointer_reg); - pcc_struct_value = 1; - if (value == 0) - value = gen_reg_rtx (outmode); + rtx pointer_reg + = hard_function_value (build_pointer_type (tfom), 0, 0, 0); + mem_value = gen_rtx_MEM (outmode, pointer_reg); + pcc_struct_value = 1; + if (value == 0) + value = gen_reg_rtx (outmode); #else /* not PCC_STATIC_STRUCT_RETURN */ - struct_value_size = GET_MODE_SIZE (outmode); - if (value != 0 && MEM_P (value)) - mem_value = value; - else - mem_value = assign_temp (tfom, 0, 1, 1); + struct_value_size = GET_MODE_SIZE (outmode); + if (value != 0 && MEM_P (value)) + mem_value = value; + else + mem_value = assign_temp (tfom, 0, 1, 1); #endif - /* This call returns a big structure. */ - flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); - } + /* This call returns a big structure. */ + flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); + } } else tfom = void_type_node; @@ -3476,8 +3495,8 @@ /* Make sure it is a reasonable operand for a move or push insn. */ if (!REG_P (addr) && !MEM_P (addr) - && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) - addr = force_operand (addr, NULL_RTX); + && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) + addr = force_operand (addr, NULL_RTX); argvec[count].value = addr; argvec[count].mode = Pmode; @@ -3485,19 +3504,19 @@ argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1); gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode, - NULL_TREE, 1) == 0); + NULL_TREE, 1) == 0); locate_and_pad_parm (Pmode, NULL_TREE, #ifdef STACK_PARMS_IN_REG_PARM_AREA - 1, + 1, #else - argvec[count].reg != 0, + argvec[count].reg != 0, #endif - 0, NULL_TREE, &args_size, &argvec[count].locate); + 0, NULL_TREE, &args_size, &argvec[count].locate); if (argvec[count].reg == 0 || argvec[count].partial != 0 - || reg_parm_stack_space > 0) - args_size.constant += argvec[count].locate.size.constant; + || reg_parm_stack_space > 0) + args_size.constant += argvec[count].locate.size.constant; FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1); @@ -3510,50 +3529,50 @@ enum machine_mode mode = (enum machine_mode) va_arg (p, int); /* We cannot convert the arg value to the mode the library wants here; - must do it earlier where we know the signedness of the arg. */ + must do it earlier where we know the signedness of the arg. 
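The split_complex_types loop above rewrites one TREE_LIST node of COMPLEX_TYPE into two scalar nodes, real part then imaginary part, splicing the new node in and skipping past it. A minimal sketch of the same list surgery, with a plain singly-linked list standing in for TREE_LIST (the node type and names here are illustrative):

    #include <stdlib.h>

    struct node { int type; struct node *next; };
    enum { SCALAR_T, COMPLEX_T };

    static void
    split_complex (struct node *p)
    {
      for (; p; p = p->next)
        if (p->type == COMPLEX_T)
          {
            struct node *imag = malloc (sizeof *imag);
            if (!imag)
              abort ();
            p->type = SCALAR_T;      /* rewrite with the component type */
            imag->type = SCALAR_T;   /* add a node for the imaginary part */
            imag->next = p->next;
            p->next = imag;
            p = imag;                /* skip the newly created node */
          }
    }
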
*/ gcc_assert (mode != BLKmode - && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); + && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); /* Make sure it is a reasonable operand for a move or push insn. */ if (!REG_P (val) && !MEM_P (val) - && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) - val = force_operand (val, NULL_RTX); + && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) + val = force_operand (val, NULL_RTX); if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1)) - { - rtx slot; - int must_copy - = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1); - - /* If this was a CONST function, it is now PURE since it now - reads memory. */ - if (flags & ECF_CONST) - { - flags &= ~ECF_CONST; - flags |= ECF_PURE; - } - - if (MEM_P (val) && !must_copy) - slot = val; - else - { - slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), - 0, 1, 1); - emit_move_insn (slot, val); - } - - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_USE (VOIDmode, slot), - call_fusage); - if (must_copy) - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, - gen_rtx_CLOBBER (VOIDmode, - slot), - call_fusage); - - mode = Pmode; - val = force_operand (XEXP (slot, 0), NULL_RTX); - } + { + rtx slot; + int must_copy + = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1); + + /* If this was a CONST function, it is now PURE since it now + reads memory. */ + if (flags & ECF_CONST) + { + flags &= ~ECF_CONST; + flags |= ECF_PURE; + } + + if (MEM_P (val) && !must_copy) + slot = val; + else + { + slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), + 0, 1, 1); + emit_move_insn (slot, val); + } + + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_USE (VOIDmode, slot), + call_fusage); + if (must_copy) + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, + gen_rtx_CLOBBER (VOIDmode, + slot), + call_fusage); + + mode = Pmode; + val = force_operand (XEXP (slot, 0), NULL_RTX); + } argvec[count].value = val; argvec[count].mode = mode; @@ -3561,22 +3580,22 @@ argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1); argvec[count].partial - = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1); + = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1); locate_and_pad_parm (mode, NULL_TREE, #ifdef STACK_PARMS_IN_REG_PARM_AREA - 1, + 1, #else - argvec[count].reg != 0, + argvec[count].reg != 0, #endif - argvec[count].partial, - NULL_TREE, &args_size, &argvec[count].locate); + argvec[count].partial, + NULL_TREE, &args_size, &argvec[count].locate); gcc_assert (!argvec[count].locate.size.var); if (argvec[count].reg == 0 || argvec[count].partial != 0 - || reg_parm_stack_space > 0) - args_size.constant += argvec[count].locate.size.constant; + || reg_parm_stack_space > 0) + args_size.constant += argvec[count].locate.size.constant; FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1); } @@ -3587,14 +3606,14 @@ original_args_size = args_size; args_size.constant = (((args_size.constant - + stack_pointer_delta - + STACK_BYTES - 1) - / STACK_BYTES - * STACK_BYTES) - - stack_pointer_delta); + + stack_pointer_delta + + STACK_BYTES - 1) + / STACK_BYTES + * STACK_BYTES) + - stack_pointer_delta); args_size.constant = MAX (args_size.constant, - reg_parm_stack_space); + reg_parm_stack_space); if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? 
fntype : TREE_TYPE (fndecl)))) args_size.constant -= reg_parm_stack_space; @@ -3605,65 +3624,65 @@ if (ACCUMULATE_OUTGOING_ARGS) { /* Since the stack pointer will never be pushed, it is possible for - the evaluation of a parm to clobber something we have already - written to the stack. Since most function calls on RISC machines - do not use the stack, this is uncommon, but must work correctly. - - Therefore, we save any area of the stack that was already written - and that we are using. Here we set up to do this by making a new - stack usage map from the old one. - - Another approach might be to try to reorder the argument - evaluations to avoid this conflicting stack usage. */ + the evaluation of a parm to clobber something we have already + written to the stack. Since most function calls on RISC machines + do not use the stack, this is uncommon, but must work correctly. + + Therefore, we save any area of the stack that was already written + and that we are using. Here we set up to do this by making a new + stack usage map from the old one. + + Another approach might be to try to reorder the argument + evaluations to avoid this conflicting stack usage. */ needed = args_size.constant; /* Since we will be writing into the entire argument area, the - map must be allocated for its entire size, not just the part that - is the responsibility of the caller. */ + map must be allocated for its entire size, not just the part that + is the responsibility of the caller. */ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) - needed += reg_parm_stack_space; + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed + 1); + needed + 1); #else highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, - needed); + needed); #endif stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); stack_usage_map = stack_usage_map_buf; if (initial_highest_arg_in_use) - memcpy (stack_usage_map, initial_stack_usage_map, - initial_highest_arg_in_use); + memcpy (stack_usage_map, initial_stack_usage_map, + initial_highest_arg_in_use); if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) - memset (&stack_usage_map[initial_highest_arg_in_use], 0, - highest_outgoing_arg_in_use - initial_highest_arg_in_use); + memset (&stack_usage_map[initial_highest_arg_in_use], 0, + highest_outgoing_arg_in_use - initial_highest_arg_in_use); needed = 0; /* We must be careful to use virtual regs before they're instantiated, - and real regs afterwards. Loop optimization, for example, can create - new libcalls after we've instantiated the virtual regs, and if we - use virtuals anyway, they won't match the rtl patterns. */ + and real regs afterwards. Loop optimization, for example, can create + new libcalls after we've instantiated the virtual regs, and if we + use virtuals anyway, they won't match the rtl patterns. */ if (virtuals_instantiated) - argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET); + argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET); else - argblock = virtual_outgoing_args_rtx; + argblock = virtual_outgoing_args_rtx; } else { if (!PUSH_ARGS) - argblock = push_block (GEN_INT (args_size.constant), 0, 0); + argblock = push_block (GEN_INT (args_size.constant), 0, 0); } /* If we push args individually in reverse order, perform stack alignment before the first push (the last arg). 
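The map handling above widens stack_usage_map so the whole outgoing argument area can be tracked, preserving whatever was already recorded and zeroing the new tail. A minimal sketch of that (re)allocation; the real code uses XNEWVEC and target macros, and the MAX above guarantees the new size is never smaller than the old one:

    #include <stdlib.h>
    #include <string.h>

    static char *
    grow_usage_map (const char *old_map, int old_high, int new_high)
    {
      char *map = malloc (new_high);   /* stack_usage_map_buf */
      if (!map)
        abort ();
      if (old_high)
        memcpy (map, old_map, old_high);
      if (old_high != new_high)
        memset (map + old_high, 0, new_high - old_high);
      return map;
    }
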
*/ if (argblock == 0 && PUSH_ARGS_REVERSED) anti_adjust_stack (GEN_INT (args_size.constant - - original_args_size.constant)); + - original_args_size.constant)); if (PUSH_ARGS_REVERSED) { @@ -3680,10 +3699,10 @@ if (ACCUMULATE_OUTGOING_ARGS) { /* The argument list is the property of the called routine and it - may clobber it. If the fixed area has been used for previous - parameters, we must save and restore it. */ + may clobber it. If the fixed area has been used for previous + parameters, we must save and restore it. */ save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, - &low_to_save, &high_to_save); + &low_to_save, &high_to_save); } #endif @@ -3701,107 +3720,107 @@ int lower_bound = 0, upper_bound = 0, i; if (! (reg != 0 && partial == 0)) - { - if (ACCUMULATE_OUTGOING_ARGS) - { - /* If this is being stored into a pre-allocated, fixed-size, - stack area, save any previous data at that location. */ + { + if (ACCUMULATE_OUTGOING_ARGS) + { + /* If this is being stored into a pre-allocated, fixed-size, + stack area, save any previous data at that location. */ #ifdef ARGS_GROW_DOWNWARD - /* stack_slot is negative, but we want to index stack_usage_map - with positive values. */ - upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; - lower_bound = upper_bound - argvec[argnum].locate.size.constant; + /* stack_slot is negative, but we want to index stack_usage_map + with positive values. */ + upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; + lower_bound = upper_bound - argvec[argnum].locate.size.constant; #else - lower_bound = argvec[argnum].locate.slot_offset.constant; - upper_bound = lower_bound + argvec[argnum].locate.size.constant; + lower_bound = argvec[argnum].locate.slot_offset.constant; + upper_bound = lower_bound + argvec[argnum].locate.size.constant; #endif - i = lower_bound; - /* Don't worry about things in the fixed argument area; - it has already been saved. */ - if (i < reg_parm_stack_space) - i = reg_parm_stack_space; - while (i < upper_bound && stack_usage_map[i] == 0) - i++; - - if (i < upper_bound) - { - /* We need to make a save area. */ - unsigned int size - = argvec[argnum].locate.size.constant * BITS_PER_UNIT; - enum machine_mode save_mode - = mode_for_size (size, MODE_INT, 1); - rtx adr - = plus_constant (argblock, - argvec[argnum].locate.offset.constant); - rtx stack_area - = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); - - if (save_mode == BLKmode) - { - argvec[argnum].save_area - = assign_stack_temp (BLKmode, - argvec[argnum].locate.size.constant, - 0); - - emit_block_move (validize_mem (argvec[argnum].save_area), - stack_area, - GEN_INT (argvec[argnum].locate.size.constant), - BLOCK_OP_CALL_PARM); - } - else - { - argvec[argnum].save_area = gen_reg_rtx (save_mode); - - emit_move_insn (argvec[argnum].save_area, stack_area); - } - } - } - - emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, - partial, reg, 0, argblock, - GEN_INT (argvec[argnum].locate.offset.constant), - reg_parm_stack_space, - ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); - - /* Now mark the segment we just used. */ - if (ACCUMULATE_OUTGOING_ARGS) - for (i = lower_bound; i < upper_bound; i++) - stack_usage_map[i] = 1; - - NO_DEFER_POP; - - if ((flags & ECF_CONST) - || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) - { - rtx use; - - /* Indicate argument access so that alias.c knows that these - values are live. 
*/ - if (argblock) - use = plus_constant (argblock, - argvec[argnum].locate.offset.constant); - else - /* When arguments are pushed, trying to tell alias.c where - exactly this argument is won't work, because the - auto-increment causes confusion. So we merely indicate - that we access something with a known mode somewhere on - the stack. */ - use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, - gen_rtx_SCRATCH (Pmode)); - use = gen_rtx_MEM (argvec[argnum].mode, use); - use = gen_rtx_USE (VOIDmode, use); - call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); - } - } + i = lower_bound; + /* Don't worry about things in the fixed argument area; + it has already been saved. */ + if (i < reg_parm_stack_space) + i = reg_parm_stack_space; + while (i < upper_bound && stack_usage_map[i] == 0) + i++; + + if (i < upper_bound) + { + /* We need to make a save area. */ + unsigned int size + = argvec[argnum].locate.size.constant * BITS_PER_UNIT; + enum machine_mode save_mode + = mode_for_size (size, MODE_INT, 1); + rtx adr + = plus_constant (argblock, + argvec[argnum].locate.offset.constant); + rtx stack_area + = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); + + if (save_mode == BLKmode) + { + argvec[argnum].save_area + = assign_stack_temp (BLKmode, + argvec[argnum].locate.size.constant, + 0); + + emit_block_move (validize_mem (argvec[argnum].save_area), + stack_area, + GEN_INT (argvec[argnum].locate.size.constant), + BLOCK_OP_CALL_PARM); + } + else + { + argvec[argnum].save_area = gen_reg_rtx (save_mode); + + emit_move_insn (argvec[argnum].save_area, stack_area); + } + } + } + + emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, + partial, reg, 0, argblock, + GEN_INT (argvec[argnum].locate.offset.constant), + reg_parm_stack_space, + ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); + + /* Now mark the segment we just used. */ + if (ACCUMULATE_OUTGOING_ARGS) + for (i = lower_bound; i < upper_bound; i++) + stack_usage_map[i] = 1; + + NO_DEFER_POP; + + if ((flags & ECF_CONST) + || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS)) + { + rtx use; + + /* Indicate argument access so that alias.c knows that these + values are live. */ + if (argblock) + use = plus_constant (argblock, + argvec[argnum].locate.offset.constant); + else + /* When arguments are pushed, trying to tell alias.c where + exactly this argument is won't work, because the + auto-increment causes confusion. So we merely indicate + that we access something with a known mode somewhere on + the stack. */ + use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, + gen_rtx_SCRATCH (Pmode)); + use = gen_rtx_MEM (argvec[argnum].mode, use); + use = gen_rtx_USE (VOIDmode, use); + call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); + } + } } /* If we pushed args in forward order, perform stack alignment after pushing the last arg. */ if (argblock == 0 && !PUSH_ARGS_REVERSED) anti_adjust_stack (GEN_INT (args_size.constant - - original_args_size.constant)); + - original_args_size.constant)); if (PUSH_ARGS_REVERSED) argnum = nargs - 1; @@ -3822,11 +3841,11 @@ int partial = argvec[argnum].partial; /* Handle calls that pass values in multiple non-contiguous - locations. The PA64 has examples of this for library calls. */ + locations. The PA64 has examples of this for library calls. 
*/ if (reg != 0 && GET_CODE (reg) == PARALLEL) - emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); + emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); else if (reg != 0 && partial == 0) - emit_move_insn (reg, val); + emit_move_insn (reg, val); NO_DEFER_POP; } @@ -3836,42 +3855,42 @@ { rtx reg = argvec[count].reg; if (reg != 0 && GET_CODE (reg) == PARALLEL) - use_group_regs (&call_fusage, reg); + use_group_regs (&call_fusage, reg); else if (reg != 0) { - int partial = argvec[count].partial; - if (partial) - { - int nregs; + int partial = argvec[count].partial; + if (partial) + { + int nregs; gcc_assert (partial % UNITS_PER_WORD == 0); - nregs = partial / UNITS_PER_WORD; - use_regs (&call_fusage, REGNO (reg), nregs); - } - else - use_reg (&call_fusage, reg); - } + nregs = partial / UNITS_PER_WORD; + use_regs (&call_fusage, REGNO (reg), nregs); + } + else + use_reg (&call_fusage, reg); + } } /* Pass the function the address in which to return a structure value. */ if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value) { emit_move_insn (struct_value, - force_reg (Pmode, - force_operand (XEXP (mem_value, 0), - NULL_RTX))); + force_reg (Pmode, + force_operand (XEXP (mem_value, 0), + NULL_RTX))); if (REG_P (struct_value)) - use_reg (&call_fusage, struct_value); + use_reg (&call_fusage, struct_value); } /* Don't allow popping to be deferred, since then cse'ing of library calls could delete a call and leave the pop. */ NO_DEFER_POP; valreg = (mem_value == 0 && outmode != VOIDmode - ? hard_libcall_value (outmode, orgfun) : NULL_RTX); + ? hard_libcall_value (outmode, orgfun) : NULL_RTX); /* Stack must be properly aligned now. */ gcc_assert (!(stack_pointer_delta - & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); + & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); before_call = get_last_insn (); @@ -3883,13 +3902,13 @@ no impact, so we pretend it is unknown. */ emit_call_1 (fun, NULL, - get_identifier (XSTR (orgfun, 0)), - build_function_type (tfom, NULL_TREE), - original_args_size.constant, args_size.constant, - struct_value_size, - FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1), - valreg, - old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far); + get_identifier (XSTR (orgfun, 0)), + build_function_type (tfom, NULL_TREE), + original_args_size.constant, args_size.constant, + struct_value_size, + FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1), + valreg, + old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far); /* For calls to `setjmp', etc., inform function.c:setjmp_warnings that it should complain if nonvolatile values are live. For @@ -3899,16 +3918,16 @@ if (flags & ECF_NORETURN) { /* The barrier note must be emitted - immediately after the CALL_INSN. Some ports emit more than - just a CALL_INSN above, so we must search for it here. */ + immediately after the CALL_INSN. Some ports emit more than + just a CALL_INSN above, so we must search for it here. */ rtx last = get_last_insn (); while (!CALL_P (last)) - { - last = PREV_INSN (last); - /* There was no CALL_INSN? */ - gcc_assert (last != before_call); - } + { + last = PREV_INSN (last); + /* There was no CALL_INSN? 
*/ + gcc_assert (last != before_call); + } emit_barrier_after (last); } @@ -3922,64 +3941,64 @@ if (outmode != VOIDmode && retval) { if (mem_value) - { - if (value == 0) - value = mem_value; - if (value != mem_value) - emit_move_insn (value, mem_value); - } + { + if (value == 0) + value = mem_value; + if (value != mem_value) + emit_move_insn (value, mem_value); + } else if (GET_CODE (valreg) == PARALLEL) - { - if (value == 0) - value = gen_reg_rtx (outmode); - emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); - } + { + if (value == 0) + value = gen_reg_rtx (outmode); + emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); + } else - { - /* Convert to the proper mode if a promotion has been active. */ - if (GET_MODE (valreg) != outmode) - { - int unsignedp = TYPE_UNSIGNED (tfom); - - gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, - fndecl ? TREE_TYPE (fndecl) : fntype, 1) - == GET_MODE (valreg)); - valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); - } - - if (value != 0) - emit_move_insn (value, valreg); - else - value = valreg; - } + { + /* Convert to the proper mode if a promotion has been active. */ + if (GET_MODE (valreg) != outmode) + { + int unsignedp = TYPE_UNSIGNED (tfom); + + gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, + fndecl ? TREE_TYPE (fndecl) : fntype, 1) + == GET_MODE (valreg)); + valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); + } + + if (value != 0) + emit_move_insn (value, valreg); + else + value = valreg; + } } if (ACCUMULATE_OUTGOING_ARGS) { #ifdef REG_PARM_STACK_SPACE if (save_area) - restore_fixed_argument_area (save_area, argblock, - high_to_save, low_to_save); + restore_fixed_argument_area (save_area, argblock, + high_to_save, low_to_save); #endif /* If we saved any argument areas, restore them. */ for (count = 0; count < nargs; count++) - if (argvec[count].save_area) - { - enum machine_mode save_mode = GET_MODE (argvec[count].save_area); - rtx adr = plus_constant (argblock, - argvec[count].locate.offset.constant); - rtx stack_area = gen_rtx_MEM (save_mode, - memory_address (save_mode, adr)); - - if (save_mode == BLKmode) - emit_block_move (stack_area, - validize_mem (argvec[count].save_area), - GEN_INT (argvec[count].locate.size.constant), - BLOCK_OP_CALL_PARM); - else - emit_move_insn (stack_area, argvec[count].save_area); - } + if (argvec[count].save_area) + { + enum machine_mode save_mode = GET_MODE (argvec[count].save_area); + rtx adr = plus_constant (argblock, + argvec[count].locate.offset.constant); + rtx stack_area = gen_rtx_MEM (save_mode, + memory_address (save_mode, adr)); + + if (save_mode == BLKmode) + emit_block_move (stack_area, + validize_mem (argvec[count].save_area), + GEN_INT (argvec[count].locate.size.constant), + BLOCK_OP_CALL_PARM); + else + emit_move_insn (stack_area, argvec[count].save_area); + } highest_outgoing_arg_in_use = initial_highest_arg_in_use; stack_usage_map = initial_stack_usage_map; @@ -4004,7 +4023,7 @@ void emit_library_call (rtx orgfun, enum libcall_type fn_type, - enum machine_mode outmode, int nargs, ...) + enum machine_mode outmode, int nargs, ...) { va_list p; @@ -4023,15 +4042,15 @@ rtx emit_library_call_value (rtx orgfun, rtx value, - enum libcall_type fn_type, - enum machine_mode outmode, int nargs, ...) + enum libcall_type fn_type, + enum machine_mode outmode, int nargs, ...) 
{ rtx result; va_list p; va_start (p, nargs); result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, - nargs, p); + nargs, p); va_end (p); return result; @@ -4058,7 +4077,7 @@ static int store_one_arg (struct arg_data *arg, rtx argblock, int flags, - int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) + int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) { tree pval = arg->tree_value; rtx reg = 0; @@ -4077,62 +4096,62 @@ if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)) { /* If this is being stored into a pre-allocated, fixed-size, stack area, - save any previous data at that location. */ + save any previous data at that location. */ if (argblock && ! variable_size && arg->stack) - { + { #ifdef ARGS_GROW_DOWNWARD - /* stack_slot is negative, but we want to index stack_usage_map - with positive values. */ - if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) - upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1; - else - upper_bound = 0; - - lower_bound = upper_bound - arg->locate.size.constant; + /* stack_slot is negative, but we want to index stack_usage_map + with positive values. */ + if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) + upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1; + else + upper_bound = 0; + + lower_bound = upper_bound - arg->locate.size.constant; #else - if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) - lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)); - else - lower_bound = 0; - - upper_bound = lower_bound + arg->locate.size.constant; + if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) + lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)); + else + lower_bound = 0; + + upper_bound = lower_bound + arg->locate.size.constant; #endif - i = lower_bound; - /* Don't worry about things in the fixed argument area; - it has already been saved. */ - if (i < reg_parm_stack_space) - i = reg_parm_stack_space; - while (i < upper_bound && stack_usage_map[i] == 0) - i++; - - if (i < upper_bound) - { - /* We need to make a save area. */ - unsigned int size = arg->locate.size.constant * BITS_PER_UNIT; - enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1); - rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); - rtx stack_area = gen_rtx_MEM (save_mode, adr); - - if (save_mode == BLKmode) - { - tree ot = TREE_TYPE (arg->tree_value); - tree nt = build_qualified_type (ot, (TYPE_QUALS (ot) - | TYPE_QUAL_CONST)); - - arg->save_area = assign_temp (nt, 0, 1, 1); - preserve_temp_slots (arg->save_area); - emit_block_move (validize_mem (arg->save_area), stack_area, - GEN_INT (arg->locate.size.constant), - BLOCK_OP_CALL_PARM); - } - else - { - arg->save_area = gen_reg_rtx (save_mode); - emit_move_insn (arg->save_area, stack_area); - } - } - } + i = lower_bound; + /* Don't worry about things in the fixed argument area; + it has already been saved. */ + if (i < reg_parm_stack_space) + i = reg_parm_stack_space; + while (i < upper_bound && stack_usage_map[i] == 0) + i++; + + if (i < upper_bound) + { + /* We need to make a save area. 
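The scan above decides whether a pre-allocated argument slot already holds live data that must be saved before this argument is stored over it. The same test extracted as a self-contained predicate (a sketch; the bounds come from arg->locate as computed above):

    static int
    needs_save_area (const char *stack_usage_map,
                     int lower_bound, int upper_bound,
                     int reg_parm_stack_space)
    {
      int i = lower_bound;
      /* The fixed register-parameter area has already been saved.  */
      if (i < reg_parm_stack_space)
        i = reg_parm_stack_space;
      /* Any nonzero byte in the slot's range means live data.  */
      while (i < upper_bound && stack_usage_map[i] == 0)
        i++;
      return i < upper_bound;
    }
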
*/ + unsigned int size = arg->locate.size.constant * BITS_PER_UNIT; + enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1); + rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); + rtx stack_area = gen_rtx_MEM (save_mode, adr); + + if (save_mode == BLKmode) + { + tree ot = TREE_TYPE (arg->tree_value); + tree nt = build_qualified_type (ot, (TYPE_QUALS (ot) + | TYPE_QUAL_CONST)); + + arg->save_area = assign_temp (nt, 0, 1, 1); + preserve_temp_slots (arg->save_area); + emit_block_move (validize_mem (arg->save_area), stack_area, + GEN_INT (arg->locate.size.constant), + BLOCK_OP_CALL_PARM); + } + else + { + arg->save_area = gen_reg_rtx (save_mode); + emit_move_insn (arg->save_area, stack_area); + } + } + } } /* If this isn't going to be placed on both the stack and in registers, @@ -4140,9 +4159,9 @@ if (! arg->pass_on_stack) { if (flags & ECF_SIBCALL) - reg = arg->tail_call_reg; + reg = arg->tail_call_reg; else - reg = arg->reg; + reg = arg->reg; partial = arg->partial; } @@ -4160,48 +4179,48 @@ if (arg->value == 0) { /* stack_arg_under_construction is nonzero if a function argument is - being evaluated directly into the outgoing argument list and - expand_call must take special action to preserve the argument list - if it is called recursively. - - For scalar function arguments stack_usage_map is sufficient to - determine which stack slots must be saved and restored. Scalar - arguments in general have pass_on_stack == 0. - - If this argument is initialized by a function which takes the - address of the argument (a C++ constructor or a C function - returning a BLKmode structure), then stack_usage_map is - insufficient and expand_call must push the stack around the - function call. Such arguments have pass_on_stack == 1. - - Note that it is always safe to set stack_arg_under_construction, - but this generates suboptimal code if set when not needed. */ + being evaluated directly into the outgoing argument list and + expand_call must take special action to preserve the argument list + if it is called recursively. + + For scalar function arguments stack_usage_map is sufficient to + determine which stack slots must be saved and restored. Scalar + arguments in general have pass_on_stack == 0. + + If this argument is initialized by a function which takes the + address of the argument (a C++ constructor or a C function + returning a BLKmode structure), then stack_usage_map is + insufficient and expand_call must push the stack around the + function call. Such arguments have pass_on_stack == 1. + + Note that it is always safe to set stack_arg_under_construction, + but this generates suboptimal code if set when not needed. */ if (arg->pass_on_stack) - stack_arg_under_construction++; + stack_arg_under_construction++; arg->value = expand_expr (pval, - (partial - || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) - ? NULL_RTX : arg->stack, - VOIDmode, EXPAND_STACK_PARM); + (partial + || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) + ? NULL_RTX : arg->stack, + VOIDmode, EXPAND_STACK_PARM); /* If we are promoting object (or for any other reason) the mode - doesn't agree, convert the mode. */ + doesn't agree, convert the mode. 
*/ if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) - arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), - arg->value, arg->unsignedp); + arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), + arg->value, arg->unsignedp); if (arg->pass_on_stack) - stack_arg_under_construction--; + stack_arg_under_construction--; } /* Check for overlap with already clobbered argument area. */ if ((flags & ECF_SIBCALL) && MEM_P (arg->value) && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0), - arg->locate.size.constant)) + arg->locate.size.constant)) sibcall_failure = 1; /* Don't allow anything left on stack from computation @@ -4218,54 +4237,54 @@ unsigned int parm_align; /* Argument is a scalar, not entirely passed in registers. - (If part is passed in registers, arg->partial says how much - and emit_push_insn will take care of putting it there.) - - Push it, and if its size is less than the - amount of space allocated to it, - also bump stack pointer by the additional space. - Note that in C the default argument promotions - will prevent such mismatches. */ + (If part is passed in registers, arg->partial says how much + and emit_push_insn will take care of putting it there.) + + Push it, and if its size is less than the + amount of space allocated to it, + also bump stack pointer by the additional space. + Note that in C the default argument promotions + will prevent such mismatches. */ size = GET_MODE_SIZE (arg->mode); /* Compute how much space the push instruction will push. - On many machines, pushing a byte will advance the stack - pointer by a halfword. */ + On many machines, pushing a byte will advance the stack + pointer by a halfword. */ #ifdef PUSH_ROUNDING size = PUSH_ROUNDING (size); #endif used = size; /* Compute how much space the argument should get: - round up to a multiple of the alignment for arguments. */ + round up to a multiple of the alignment for arguments. */ if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval))) - used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) - / (PARM_BOUNDARY / BITS_PER_UNIT)) - * (PARM_BOUNDARY / BITS_PER_UNIT)); + used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) + / (PARM_BOUNDARY / BITS_PER_UNIT)) + * (PARM_BOUNDARY / BITS_PER_UNIT)); /* Compute the alignment of the pushed argument. */ parm_align = arg->locate.boundary; if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) - { - int pad = used - size; - if (pad) - { - unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT; - parm_align = MIN (parm_align, pad_align); - } - } + { + int pad = used - size; + if (pad) + { + unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT; + parm_align = MIN (parm_align, pad_align); + } + } /* This isn't already where we want it on the stack, so put it there. - This can either be done with push or copy insns. */ + This can either be done with push or copy insns. */ emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, - parm_align, partial, reg, used - size, argblock, - ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, - ARGS_SIZE_RTX (arg->locate.alignment_pad)); + parm_align, partial, reg, used - size, argblock, + ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, + ARGS_SIZE_RTX (arg->locate.alignment_pad)); /* Unless this is a partially-in-register argument, the argument is now - in the stack. */ + in the stack. */ if (partial == 0) - arg->value = arg->stack; + arg->value = arg->stack; } else { @@ -4276,115 +4295,115 @@ rtx size_rtx; /* Pushing a nonscalar. 
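The scalar-push hunk above rounds the argument's size up to the parameter boundary and then derives the alignment guaranteed by the padding from its lowest set bit. Both computations in isolation, with PARM_BOUNDARY = 64 and BITS_PER_UNIT = 8 as example values rather than any particular target's:

    #define PARM_BYTES (64 / 8)   /* PARM_BOUNDARY / BITS_PER_UNIT */

    /* round_arg_size (5) == 8, round_arg_size (8) == 8.  */
    static int
    round_arg_size (int size)
    {
      return ((size + PARM_BYTES - 1) / PARM_BYTES) * PARM_BYTES;
    }

    /* (pad & -pad) isolates the lowest set bit, so a pad of 6 bytes
       only guarantees 2-byte (16-bit) alignment below it.  */
    static unsigned int
    pad_alignment (int pad)
    {
      return (pad & -pad) * 8;
    }
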
- If part is passed in registers, PARTIAL says how much - and emit_push_insn will take care of putting it there. */ + If part is passed in registers, PARTIAL says how much + and emit_push_insn will take care of putting it there. */ /* Round its size up to a multiple - of the allocation unit for arguments. */ + of the allocation unit for arguments. */ if (arg->locate.size.var != 0) - { - excess = 0; - size_rtx = ARGS_SIZE_RTX (arg->locate.size); - } + { + excess = 0; + size_rtx = ARGS_SIZE_RTX (arg->locate.size); + } else - { - /* PUSH_ROUNDING has no effect on us, because emit_push_insn - for BLKmode is careful to avoid it. */ - excess = (arg->locate.size.constant - - int_size_in_bytes (TREE_TYPE (pval)) - + partial); - size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), - NULL_RTX, TYPE_MODE (sizetype), - EXPAND_NORMAL); - } + { + /* PUSH_ROUNDING has no effect on us, because emit_push_insn + for BLKmode is careful to avoid it. */ + excess = (arg->locate.size.constant + - int_size_in_bytes (TREE_TYPE (pval)) + + partial); + size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), + NULL_RTX, TYPE_MODE (sizetype), + EXPAND_NORMAL); + } parm_align = arg->locate.boundary; /* When an argument is padded down, the block is aligned to - PARM_BOUNDARY, but the actual argument isn't. */ + PARM_BOUNDARY, but the actual argument isn't. */ if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) - { - if (arg->locate.size.var) - parm_align = BITS_PER_UNIT; - else if (excess) - { - unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; - parm_align = MIN (parm_align, excess_align); - } - } + { + if (arg->locate.size.var) + parm_align = BITS_PER_UNIT; + else if (excess) + { + unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; + parm_align = MIN (parm_align, excess_align); + } + } if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) - { - /* emit_push_insn might not work properly if arg->value and - argblock + arg->locate.offset areas overlap. */ - rtx x = arg->value; - int i = 0; - - if (XEXP (x, 0) == crtl->args.internal_arg_pointer - || (GET_CODE (XEXP (x, 0)) == PLUS - && XEXP (XEXP (x, 0), 0) == - crtl->args.internal_arg_pointer - && CONST_INT_P (XEXP (XEXP (x, 0), 1)))) - { - if (XEXP (x, 0) != crtl->args.internal_arg_pointer) - i = INTVAL (XEXP (XEXP (x, 0), 1)); - - /* expand_call should ensure this. */ - gcc_assert (!arg->locate.offset.var - && arg->locate.size.var == 0 - && CONST_INT_P (size_rtx)); - - if (arg->locate.offset.constant > i) - { - if (arg->locate.offset.constant < i + INTVAL (size_rtx)) - sibcall_failure = 1; - } - else if (arg->locate.offset.constant < i) - { - /* Use arg->locate.size.constant instead of size_rtx - because we only care about the part of the argument - on the stack. */ - if (i < (arg->locate.offset.constant - + arg->locate.size.constant)) - sibcall_failure = 1; - } - else - { - /* Even though they appear to be at the same location, - if part of the outgoing argument is in registers, - they aren't really at the same location. Check for - this by making sure that the incoming size is the - same as the outgoing size. */ - if (arg->locate.size.constant != INTVAL (size_rtx)) - sibcall_failure = 1; - } - } - } + { + /* emit_push_insn might not work properly if arg->value and + argblock + arg->locate.offset areas overlap. 
*/ + rtx x = arg->value; + int i = 0; + + if (XEXP (x, 0) == crtl->args.internal_arg_pointer + || (GET_CODE (XEXP (x, 0)) == PLUS + && XEXP (XEXP (x, 0), 0) == + crtl->args.internal_arg_pointer + && CONST_INT_P (XEXP (XEXP (x, 0), 1)))) + { + if (XEXP (x, 0) != crtl->args.internal_arg_pointer) + i = INTVAL (XEXP (XEXP (x, 0), 1)); + + /* expand_call should ensure this. */ + gcc_assert (!arg->locate.offset.var + && arg->locate.size.var == 0 + && CONST_INT_P (size_rtx)); + + if (arg->locate.offset.constant > i) + { + if (arg->locate.offset.constant < i + INTVAL (size_rtx)) + sibcall_failure = 1; + } + else if (arg->locate.offset.constant < i) + { + /* Use arg->locate.size.constant instead of size_rtx + because we only care about the part of the argument + on the stack. */ + if (i < (arg->locate.offset.constant + + arg->locate.size.constant)) + sibcall_failure = 1; + } + else + { + /* Even though they appear to be at the same location, + if part of the outgoing argument is in registers, + they aren't really at the same location. Check for + this by making sure that the incoming size is the + same as the outgoing size. */ + if (arg->locate.size.constant != INTVAL (size_rtx)) + sibcall_failure = 1; + } + } + } emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, - parm_align, partial, reg, excess, argblock, - ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, - ARGS_SIZE_RTX (arg->locate.alignment_pad)); + parm_align, partial, reg, excess, argblock, + ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, + ARGS_SIZE_RTX (arg->locate.alignment_pad)); /* Unless this is a partially-in-register argument, the argument is now - in the stack. - - ??? Unlike the case above, in which we want the actual - address of the data, so that we can load it directly into a - register, here we want the address of the stack slot, so that - it's properly aligned for word-by-word copying or something - like that. It's not clear that this is always correct. */ + in the stack. + + ??? Unlike the case above, in which we want the actual + address of the data, so that we can load it directly into a + register, here we want the address of the stack slot, so that + it's properly aligned for word-by-word copying or something + like that. It's not clear that this is always correct. */ if (partial == 0) - arg->value = arg->stack_slot; + arg->value = arg->stack_slot; } if (arg->reg && GET_CODE (arg->reg) == PARALLEL) { tree type = TREE_TYPE (arg->tree_value); arg->parallel_value - = emit_group_load_into_temps (arg->reg, arg->value, type, - int_size_in_bytes (type)); + = emit_group_load_into_temps (arg->reg, arg->value, type, + int_size_in_bytes (type)); } /* Mark all slots this store used. */ @@ -4411,7 +4430,7 @@ bool must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED, - const_tree type) + const_tree type) { if (!type) return false; @@ -4452,7 +4471,7 @@ if (mode == BLKmode && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT) && (FUNCTION_ARG_PADDING (mode, type) - == (BYTES_BIG_ENDIAN ? upward : downward))) + == (BYTES_BIG_ENDIAN ? 
upward : downward))) return true; return false; diff -r bd49c42ec43e -r 60c1b2f8487a gcc/cbc-goto.h --- a/gcc/cbc-goto.h Mon Feb 15 17:39:45 2010 +0900 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,806 +0,0 @@ - -static void -preexpand_argument_expr (struct arg_data *, int); - -static void -determine_order(int *, int); - -static int -expand_one_arg_push (struct arg_data *, rtx, int, int, int); - -static void -push_overlaps(struct arg_data *, int); - -static int -check_frame_offset(rtx); - - -static rtx -expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype, - tree addr, - int ignore, - int flags, - int num_actuals, - struct arg_data *args, - struct args_size *args_size, - CUMULATIVE_ARGS args_so_far, - rtx old_stack_level, - int reg_parm_stack_space, - int old_pending_adj, - unsigned HOST_WIDE_INT preferred_stack_boundary, - unsigned HOST_WIDE_INT preferred_unit_stack_boundary, - rtx structure_value_addr, - //int structure_value_addr_parm, - int old_inhibit_defer_pop - ) -{ - - /* folowing variables is just copied from expand_call. */ - - int pass = 0; - int i; -#ifdef REG_PARM_STACK_SPACE - /* Define the boundary of the register parm stack space that needs to be - saved, if any. */ -#endif - rtx funexp; - rtx valreg; - struct args_size adjusted_args_size; - int unadjusted_args_size; - int reg_parm_seen; - rtx static_chain_value; - int old_stack_allocated; - int old_stack_pointer_delta = 0; - int old_stack_arg_under_construction = 0; - rtx call_fusage; - char *stack_usage_map_buf = NULL; - rtx argblock = 0; - HOST_WIDE_INT struct_value_size = 0; - int pcc_struct_value = 0; - int initial_highest_arg_in_use = highest_outgoing_arg_in_use; - char *initial_stack_usage_map = stack_usage_map; - rtx tail_call_insns = NULL_RTX; - - - int *store_order; - - - int sibcall_failure = 0; - /* We want to emit any pending stack adjustments before the tail - recursion "call". That way we know any adjustment after the tail - recursion call can be ignored if we indeed use the tail - call expansion. */ - int save_pending_stack_adjust = 0; - int save_stack_pointer_delta = 0; - rtx insns; - rtx before_call, next_arg_reg; - - /* State variables we need to save and restore between - iterations. */ - save_pending_stack_adjust = pending_stack_adjust; - save_stack_pointer_delta = stack_pointer_delta; - flags |= ECF_SIBCALL; - - /* Other state variables that we must reinitialize each time - through the loop (that are not initialized by the loop itself). */ - argblock = 0; - call_fusage = 0; - - /* Start a new sequence for the normal call case. - - From this point on, if the sibling call fails, we want to set - sibcall_failure instead of continuing the loop. */ - start_sequence (); - - /* Don't let pending stack adjusts add up to too much. - Also, do all pending adjustments now if there is any chance - this might be a call to alloca or if we are expanding a sibling - call sequence or if we are calling a function that is to return - with stack pointer depressed. - Also do the adjustments before a throwing call, otherwise - exception handling can fail; PR 19225. */ - if (pending_stack_adjust >= 32 - || (pending_stack_adjust > 0 - && (flags & ECF_MAY_BE_ALLOCA)) - || (pending_stack_adjust > 0 - && flag_exceptions && !(flags & ECF_NOTHROW)) - || pass == 0) - do_pending_stack_adjust (); - - - if (pass == 0 && crtl->stack_protect_guard) - stack_protect_epilogue (); - - adjusted_args_size = *args_size; - /* Compute the actual size of the argument block required. 
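The file deleted here, cbc-goto.h, implemented expand_cbc_goto: a hand-rolled sibling-call expansion (note the flags |= ECF_SIBCALL above) for CbC's goto between code segments, duplicating large parts of expand_call. A minimal CbC sketch of the construct it expanded, assuming this fork's __code syntax (cf. the CbC_IS_CODE_SEGMENT check further down):

    __code next (int a, int b);   /* a code segment: control never returns */

    __code start (int a, int b)
    {
      goto next (a + 1, b);       /* compiled as a forced tail call */
    }
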
The variable - and constant sizes must be combined, the size may have to be rounded, - and there may be a minimum required size. When generating a sibcall - pattern, do not round up, since we'll be re-using whatever space our - caller provided. */ - unadjusted_args_size - = compute_argument_block_size (reg_parm_stack_space, - &adjusted_args_size, - fndecl, fntype, - (pass == 0 ? 0 - : preferred_stack_boundary)); - - old_stack_allocated = stack_pointer_delta - pending_stack_adjust; - - argblock = crtl->args.internal_arg_pointer; - - argblock -#ifdef STACK_GROWS_DOWNWARD - = plus_constant (argblock, crtl->args.pretend_args_size); -#else - = plus_constant (argblock, -crtl->args.pretend_args_size); -#endif - - - stored_args_map = sbitmap_alloc (args_size->constant); - sbitmap_zero (stored_args_map); - - - if (ACCUMULATE_OUTGOING_ARGS) - { - /* The save/restore code in store_one_arg handles all - cases except one: a constructor call (including a C - function returning a BLKmode struct) to initialize - an argument. */ - if (stack_arg_under_construction) - { - rtx push_size - = GEN_INT (adjusted_args_size.constant - + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype - : TREE_TYPE (fndecl))) ? 0 - : reg_parm_stack_space)); - if (old_stack_level == 0) - { - emit_stack_save (SAVE_BLOCK, &old_stack_level, - NULL_RTX); - old_stack_pointer_delta = stack_pointer_delta; - old_pending_adj = pending_stack_adjust; - pending_stack_adjust = 0; - /* stack_arg_under_construction says whether a stack - arg is being constructed at the old stack level. - Pushing the stack gets a clean outgoing argument - block. */ - old_stack_arg_under_construction - = stack_arg_under_construction; - stack_arg_under_construction = 0; - /* Make a new map for the new argument list. */ - if (stack_usage_map_buf) - free (stack_usage_map_buf); - stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); - stack_usage_map = stack_usage_map_buf; - memset (stack_usage_map, 0, highest_outgoing_arg_in_use); - highest_outgoing_arg_in_use = 0; - } - allocate_dynamic_stack_space (push_size, NULL_RTX, - BITS_PER_UNIT); - } - - /* If argument evaluation might modify the stack pointer, - copy the address of the argument list to a register. */ - for (i = 0; i < num_actuals; i++) - if (args[i].pass_on_stack) - { - argblock = copy_addr_to_reg (argblock); - break; - } - } - - compute_argument_addresses (args, argblock, num_actuals); - - /* in the case that - a function goto codesegment. - adjust stack space. */ - if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) ) - //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) ) - { - HOST_WIDE_INT padding; - padding = CbC_PRETENDED_STACK_SIZE - - (crtl->args.size - crtl->args.pretend_args_size); - if (0&&padding > 0) - anti_adjust_stack (GEN_INT (padding)); - } - - /* Now that the stack is properly aligned, pops can't safely - be deferred during the evaluation of the arguments. */ - NO_DEFER_POP; - - funexp = rtx_for_function_call (fndecl, addr); - - /* Figure out the register where the value, if any, will come back. */ - valreg = 0; - - - /* Precompute all register parameters. It isn't safe to compute anything - once we have started filling any specific hard regs. 
*/ - precompute_register_parameters (num_actuals, args, ®_parm_seen); - - if (CALL_EXPR_STATIC_CHAIN (exp)) - static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); - else - static_chain_value = 0; - - - /* parallel assignment */ - store_order = alloca (num_actuals * sizeof (int)); - memset (store_order, 0, num_actuals * sizeof (int)); - - /* fill the arg[i]->exprs. */ - for (i = 0; i < num_actuals; i++) - { - if (args[i].reg == 0 || args[i].pass_on_stack) - { - preexpand_argument_expr (&args[i], - adjusted_args_size.var != 0); - } - } - - - /* push overlapped argument to stack. */ - push_overlaps(args, num_actuals); - - /* determine ordering to store arguments. - and generate RTL that store some variable temporary, if it needed.*/ - /* now... this function do nothing. */ - determine_order(store_order, num_actuals); - - /* push arguments in the order . */ - for (i = 0; i < num_actuals; i++) - { - if (args[store_order[i]].reg == 0 - || args[store_order[i]].pass_on_stack - || args[store_order[i]].partial!=0 ) - { - expand_one_arg_push (&args[store_order[i]], argblock, flags, - adjusted_args_size.var != 0, - reg_parm_stack_space); - } - } - - - /* If register arguments require space on the stack and stack space - was not preallocated, allocate stack space here for arguments - passed in registers. */ -#ifdef OUTGOING_REG_PARM_STACK_SPACE - //if (!ACCUMULATE_OUTGOING_ARGS - //&& must_preallocate == 0 && reg_parm_stack_space > 0) - //anti_adjust_stack (GEN_INT (reg_parm_stack_space)); -#endif - - /* */ - funexp = prepare_call_address (fndecl, funexp, static_chain_value, - &call_fusage, reg_parm_seen, pass == 0); - - /* store args into register. */ - load_register_parameters (args, num_actuals, &call_fusage, flags, - //pass == 0, &sibcall_failure); - 0, NULL); - - /* Save a pointer to the last insn before the call, so that we can - later safely search backwards to find the CALL_INSN. */ - before_call = get_last_insn (); - - /* Set up next argument register. For sibling calls on machines - with register windows this should be the incoming register. */ -#ifdef FUNCTION_INCOMING_ARG - if (pass == 0) - next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode, - void_type_node, 1); - else -#endif - next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, - void_type_node, 1); - - /* All arguments and registers used for the call must be set up by - now! */ - - /* Stack must be properly aligned now. */ - gcc_assert (!pass - || !(stack_pointer_delta % preferred_unit_stack_boundary)); -#if 0 - /* store environment. */ - if ( env_tree!=NULL ) - { - emit_insn (gen_rtx_CLOBBER (VOIDmode, - gen_rtx_MEM (BLKmode, - hard_frame_pointer_rtx))); - emit_move_insn (hard_frame_pointer_rtx, env_rtx); - emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); - //pop_temp_slots (); - - emit_indirect_jump (funexp); - } -#endif - if (GET_CODE (funexp) != SYMBOL_REF) - { - push_temp_slots(); - preserve_temp_slots(funexp); - /* Generate the actual call instruction. */ - emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, - adjusted_args_size.constant, struct_value_size, - //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, - next_arg_reg, valreg, 0, call_fusage, - flags, & args_so_far); - pop_temp_slots(); - } - else - { - - /* Generate the actual call instruction. 
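The preexpand/push_overlaps/store_order phase above implements a parallel move: in a goto, every outgoing argument slot can be both a source and a destination, so any overlapped value has to be saved (pushed) before it is overwritten. The classic two-variable instance of the problem, as a plain C sketch:

    static void
    parallel_move_swap (int *a, int *b)
    {
      /* Naive in-place stores (*a = *b; *b = *a;) would read a value
         that has already been clobbered; saving the overlapped source
         first is what the overlap push does for argument slots.  */
      int t = *a;
      *a = *b;
      *b = t;
    }
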
*/ - emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, - adjusted_args_size.constant, struct_value_size, - //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, - next_arg_reg, valreg, 0, call_fusage, - flags, & args_so_far); - } - - /* If a non-BLKmode value is returned at the most significant end - of a register, shift the register right by the appropriate amount - and update VALREG accordingly. BLKmode values are handled by the - group load/store machinery below. */ - if (!structure_value_addr - && !pcc_struct_value - && TYPE_MODE (TREE_TYPE (exp)) != BLKmode - && targetm.calls.return_in_msb (TREE_TYPE (exp))) - { - if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg)) - sibcall_failure = 1; - valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg)); - } - - - /* For calls to `setjmp', etc., inform flow.c it should complain - if nonvolatile values are live. For functions that cannot return, - inform flow that control does not fall through. */ - - if ((flags & ECF_NORETURN) || pass == 0) - { - /* The barrier must be emitted - immediately after the CALL_INSN. Some ports emit more - than just a CALL_INSN above, so we must search for it here. */ - - rtx last = get_last_insn (); - while (!CALL_P (last)) - { - last = PREV_INSN (last); - /* There was no CALL_INSN? */ - gcc_assert (last != before_call); - } - - emit_barrier_after (last); - - /* Stack adjustments after a noreturn call are dead code. - However when NO_DEFER_POP is in effect, we must preserve - stack_pointer_delta. */ - if (inhibit_defer_pop == 0) - { - stack_pointer_delta = old_stack_allocated; - pending_stack_adjust = 0; - } - } - - /* If value type not void, return an rtx for the value. */ - - if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode - || ignore) - target = const0_rtx; - - tree type = TREE_TYPE (exp); - int unsignedp = TYPE_UNSIGNED (type); - - /* if (targetm.calls.promote_function_return(funtype))*/ - if (promote_function_mode(type, TYPE_MODE (type), &unsignedp, - fndecl ? TREE_TYPE (fndecl) : fntype, 0)) - { - /* If we promoted this return value, make the proper SUBREG. - TARGET might be const0_rtx here, so be careful. */ - if (REG_P (target) - && TYPE_MODE (TREE_TYPE (exp)) != BLKmode - && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) - { - int offset = 0; - enum machine_mode pmode; - - pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);//, 1); - /* If we don't promote as expected, something is wrong. */ - gcc_assert (GET_MODE (target) == pmode); - - if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) - && (GET_MODE_SIZE (GET_MODE (target)) - > GET_MODE_SIZE (TYPE_MODE (type)))) - { - offset = GET_MODE_SIZE (GET_MODE (target)) - - GET_MODE_SIZE (TYPE_MODE (type)); - if (! BYTES_BIG_ENDIAN) - offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; - else if (! WORDS_BIG_ENDIAN) - offset %= UNITS_PER_WORD; - } - target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); - SUBREG_PROMOTED_VAR_P (target) = 1; - SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); - } - } - - /* If size of args is variable or this was a constructor call for a stack - argument, restore saved stack-pointer value. 
*/ - - if (old_stack_level) - { - emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); - stack_pointer_delta = old_stack_pointer_delta; - pending_stack_adjust = old_pending_adj; - old_stack_allocated = stack_pointer_delta - pending_stack_adjust; - stack_arg_under_construction = old_stack_arg_under_construction; - highest_outgoing_arg_in_use = initial_highest_arg_in_use; - stack_usage_map = initial_stack_usage_map; - } - - /* If this was alloca, record the new stack level for nonlocal gotos. - Check for the handler slots since we might not have a save area - for non-local gotos. */ - - if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0) - update_nonlocal_goto_save_area (); - - /* Free up storage we no longer need. */ - for (i = 0; i < num_actuals; ++i) - if (args[i].aligned_regs) - free (args[i].aligned_regs); - - insns = get_insns (); - end_sequence (); - - tail_call_insns = insns; - - /* Restore the pending stack adjustment now that we have - finished generating the sibling call sequence. */ - - pending_stack_adjust = save_pending_stack_adjust; - stack_pointer_delta = save_stack_pointer_delta; - - /* Prepare arg structure for next iteration. */ - for (i = 0; i < num_actuals; i++) - { - args[i].value = 0; - args[i].aligned_regs = 0; - args[i].stack = 0; - } - - sbitmap_free (stored_args_map); - - - emit_insn(tail_call_insns); - crtl->tail_call_emit = true; - - return target; -} - - -static void -preexpand_argument_expr (struct arg_data *arg, - int variable_size ATTRIBUTE_UNUSED) -{ - tree pval = arg->tree_value; - rtx reg = 0; - int partial = 0; - - if (TREE_CODE (pval) == ERROR_MARK) - return; - - /* Push a new temporary level for any temporaries we make for - this argument. */ - push_temp_slots (); - - - /* If this isn't going to be placed on both the stack and in registers, - set up the register and number of words. */ - if (! arg->pass_on_stack) - { - //if (flags & ECF_SIBCALL) - reg = arg->tail_call_reg; - //else - //reg = arg->reg; - partial = arg->partial; - } - - /* Being passed entirely in a register. We shouldn't be called in - this case. */ - gcc_assert (reg == 0 || partial != 0); - - /* If this arg needs special alignment, don't load the registers - here. */ - if (arg->n_aligned_regs != 0) - reg = 0; - - /* Start a new sequence for the arg->exprs. */ - start_sequence (); - - - if (arg->pass_on_stack) - stack_arg_under_construction++; - - arg->value = expand_expr (pval, - (partial - || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) - ? NULL_RTX : arg->stack, - VOIDmode, EXPAND_STACK_PARM); - - /* If we are promoting object (or for any other reason) the mode - doesn't agree, convert the mode. */ - - if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) - arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), - arg->value, arg->unsignedp); - - if (arg->pass_on_stack) - stack_arg_under_construction--; - - arg->exprs = get_insns (); - end_sequence (); - - if (arg->exprs) emit_insn(arg->exprs); - - preserve_temp_slots (arg->value); - pop_temp_slots (); - - return ; -} - -static int -expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags, - int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) -{ - tree pval = arg->tree_value; - int used = 0; - int i, lower_bound = 0, upper_bound = 0; - rtx reg = 0; - int partial = 0; - - /* Push a new temporary level for any temporaries we make for - this argument. */ - push_temp_slots (); - - - /* copy from store_one_arg. 
to be modified later.  */
-  /* If this isn't going to be placed on both the stack and in registers,
-     set up the register and number of words.  */
-  if (! arg->pass_on_stack)
-    {
-      //if (flags & ECF_SIBCALL)
-      reg = arg->tail_call_reg;
-      //else
-      //reg = arg->reg;
-      partial = arg->partial;
-    }
-  /* Being passed entirely in a register.  We shouldn't be called in
-     this case.  */
-  gcc_assert (reg == 0 || partial != 0);
-  /* If this arg needs special alignment, don't load the registers
-     here.  */
-  if (arg->n_aligned_regs != 0)
-    reg = 0;
-
-
-
-
-  if (arg->value == arg->stack)
-    /* If the value is already in the stack slot, we are done.  */
-    ;
-  else if (arg->mode != BLKmode)
-    {
-      int size;
-
-      /* Argument is a scalar, not entirely passed in registers.
-	 (If part is passed in registers, arg->partial says how much
-	 and emit_push_insn will take care of putting it there.)
-
-	 Push it, and if its size is less than the
-	 amount of space allocated to it,
-	 also bump stack pointer by the additional space.
-	 Note that in C the default argument promotions
-	 will prevent such mismatches.  */
-
-      size = GET_MODE_SIZE (arg->mode);
-      /* Compute how much space the push instruction will push.
-	 On many machines, pushing a byte will advance the stack
-	 pointer by a halfword.  */
-#ifdef PUSH_ROUNDING
-      size = PUSH_ROUNDING (size);
-#endif
-      used = size;
-
-      /* Compute how much space the argument should get:
-	 round up to a multiple of the alignment for arguments.  */
-      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
-	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
-		 / (PARM_BOUNDARY / BITS_PER_UNIT))
-		* (PARM_BOUNDARY / BITS_PER_UNIT));
-
-      /* This isn't already where we want it on the stack, so put it there.
-	 This can either be done with push or copy insns.  */
-      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
-		      PARM_BOUNDARY, partial, reg, used - size, argblock,
-		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
-		      ARGS_SIZE_RTX (arg->locate.alignment_pad));
-
-      /* Unless this is a partially-in-register argument, the argument is now
-	 in the stack.  */
-      if (partial == 0)
-	arg->value = arg->stack;
-    }
-  else
-    {
-      /* BLKmode, at least partly to be pushed.  */
-
-      unsigned int parm_align;
-      int excess;
-      rtx size_rtx;
-
-      /* Pushing a nonscalar.
-	 If part is passed in registers, PARTIAL says how much
-	 and emit_push_insn will take care of putting it there.  */
-
-      /* Round its size up to a multiple
-	 of the allocation unit for arguments.  */
-
-      if (arg->locate.size.var != 0)
-	{
-	  excess = 0;
-	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
-	}
-      else
-	{
-	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
-	     for BLKmode is careful to avoid it.  */
-	  excess = (arg->locate.size.constant
-		    - int_size_in_bytes (TREE_TYPE (pval))
-		    + partial);
-	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
-				  NULL_RTX, TYPE_MODE (sizetype), 0);
-	}
-
-      parm_align = arg->locate.boundary;
-
-      /* When an argument is padded down, the block is aligned to
-	 PARM_BOUNDARY, but the actual argument isn't.  */
-      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
-	{
-	  if (arg->locate.size.var)
-	    parm_align = BITS_PER_UNIT;
-	  else if (excess)
-	    {
-	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
-	      parm_align = MIN (parm_align, excess_align);
-	    }
-	}
-
-      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
-	{
-	  /* emit_push_insn might not work properly if arg->value and
-	     argblock + arg->locate.offset areas overlap. 
*/
-	  rtx x = arg->value;
-	  int i = 0;
-
-	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
-	      || (GET_CODE (XEXP (x, 0)) == PLUS
-		  && XEXP (XEXP (x, 0), 0) ==
-		     crtl->args.internal_arg_pointer
-		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
-	    {
-	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
-		i = INTVAL (XEXP (XEXP (x, 0), 1));
-
-	      /* expand_call should ensure this.  */
-	      gcc_assert (!arg->locate.offset.var
-			  && GET_CODE (size_rtx) == CONST_INT);
-	    }
-	}
-
-      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
-		      parm_align, partial, reg, excess, argblock,
-		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
-		      ARGS_SIZE_RTX (arg->locate.alignment_pad));
-
-      /* Unless this is a partially-in-register argument, the argument is now
-	 in the stack.
-
-	 ??? Unlike the case above, in which we want the actual
-	 address of the data, so that we can load it directly into a
-	 register, here we want the address of the stack slot, so that
-	 it's properly aligned for word-by-word copying or something
-	 like that.  It's not clear that this is always correct.  */
-      if (partial == 0)
-	arg->value = arg->stack_slot;
-    }
-
-  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
-    {
-      tree type = TREE_TYPE (arg->tree_value);
-      arg->parallel_value
-	= emit_group_load_into_temps (arg->reg, arg->value, type,
-				      int_size_in_bytes (type));
-    }
-
-  /* Mark all slots this store used.  */
-  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
-      && argblock && ! variable_size && arg->stack)
-    for (i = lower_bound; i < upper_bound; i++)
-      stack_usage_map[i] = 1;
-
-  /* Once we have pushed something, pops can't safely
-     be deferred during the rest of the arguments.  */
-  NO_DEFER_POP;
-
-  /* Free any temporary slots made in processing this argument.  Show
-     that we might have taken the address of something and pushed that
-     as an operand.  */
-  preserve_temp_slots (NULL_RTX);
-  free_temp_slots ();
-  pop_temp_slots ();
-
-  return 0;
-}
-
-static void
-determine_order(int *order, int num_actuals)
-{
-  int i;
-  for (i=0; i<num_actuals; i++)
-    order[i] = i;
-  return ;
-}
-
-static int
-check_frame_offset(rtx x)
-{
-  int i;
-  rtx addr = XEXP (x, 0);
-
-  if (addr == crtl->args.internal_arg_pointer)
-    i = 0;
-  else if (GET_CODE (addr) == PLUS
-	   && XEXP (addr, 0) == crtl->args.internal_arg_pointer
-	   && GET_CODE (XEXP (addr, 1)) == CONST_INT)
-    i = INTVAL (XEXP (addr, 1));
-  else if (GET_CODE (addr) == PLUS
-	   && GET_CODE (XEXP (addr, 0)) == CONST_INT
-	   && XEXP (addr, 1) == crtl->args.internal_arg_pointer )
-    i = INTVAL (XEXP (addr, 0));
-  else
-    return -1;
-
-  return i;
-}
-
diff -r bd49c42ec43e -r 60c1b2f8487a gcc/cbc-tree.h
--- a/gcc/cbc-tree.h	Mon Feb 15 17:39:45 2010 +0900
+++ b/gcc/cbc-tree.h	Sun Apr 25 17:03:45 2010 +0900
@@ -1,9 +1,6 @@
-
 //#define CbC_PRETENDED_STACK_SIZE 256
 #define CbC_PRETENDED_STACK_SIZE 1024
-
-
 /* Set if the fntype is code segment on CbC language.  */
 // flag3,5,6 has been used by c-tree.h
 #define CbC_IS_CODE_SEGMENT(TYPE) TYPE_LANG_FLAG_5 ( FUNCTION_TYPE_CHECK(TYPE))
@@ -15,6 +12,3 @@
 extern tree cbc_return_f;
 extern tree cbc_env;
 extern location_t cbc_return;
-
-
-
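
For reference, a minimal sketch of the CbC source that this expansion path
handles (assuming the usual CbC `__code' keyword; the code segments `swap'
and `finish' below are hypothetical).  A goto between code segments is
compiled as a tail call whose arguments form a single parallel assignment,
so outgoing values that overlap the caller's incoming argument slots, as in
the swapped goto below, have to be staged through temporaries before they
are pushed; that is what push_overlaps, determine_order and
expand_one_arg_push arrange in the code removed above.

__code swap(int a, int b, int count)
{
  if (count == 0)
    goto finish(a, b);          /* continue with the final values */
  goto swap(b, a, count - 1);   /* a and b trade stack slots, so both must be
                                   read before either slot is overwritten */
}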