comparison gcc/calls.c @ 55:77e2b8dfacca gcc-4.4.5

update GCC from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents 855418dad1a3
children 326d9e06c2e3 b7f97abdc517
52:c156f1bd5cd9 (before) 55:77e2b8dfacca (after)
37 #include "tm_p.h" 37 #include "tm_p.h"
38 #include "timevar.h" 38 #include "timevar.h"
39 #include "sbitmap.h" 39 #include "sbitmap.h"
40 #include "langhooks.h" 40 #include "langhooks.h"
41 #include "target.h" 41 #include "target.h"
42 #include "debug.h"
42 #include "cgraph.h" 43 #include "cgraph.h"
43 #include "except.h" 44 #include "except.h"
44 #include "dbgcnt.h" 45 #include "dbgcnt.h"
45 #include "tree-flow.h" 46 #include "tree-flow.h"
46 47
164 165
165 CALL_FUSAGE points to a variable holding the prospective 166 CALL_FUSAGE points to a variable holding the prospective
166 CALL_INSN_FUNCTION_USAGE information. */ 167 CALL_INSN_FUNCTION_USAGE information. */
167 168
168 rtx 169 rtx
169 prepare_call_address (rtx funexp, rtx static_chain_value, 170 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
170 rtx *call_fusage, int reg_parm_seen, int sibcallp) 171 rtx *call_fusage, int reg_parm_seen, int sibcallp)
171 { 172 {
172 /* Make a valid memory address and copy constants through pseudo-regs, 173 /* Make a valid memory address and copy constants through pseudo-regs,
173 but not for a constant address if -fno-function-cse. */ 174 but not for a constant address if -fno-function-cse. */
174 if (GET_CODE (funexp) != SYMBOL_REF) 175 if (GET_CODE (funexp) != SYMBOL_REF)
185 #endif 186 #endif
186 } 187 }
187 188
188 if (static_chain_value != 0) 189 if (static_chain_value != 0)
189 { 190 {
191 rtx chain;
192
193 gcc_assert (fndecl);
194 chain = targetm.calls.static_chain (fndecl, false);
190 static_chain_value = convert_memory_address (Pmode, static_chain_value); 195 static_chain_value = convert_memory_address (Pmode, static_chain_value);
191 emit_move_insn (static_chain_rtx, static_chain_value); 196
192 197 emit_move_insn (chain, static_chain_value);
193 if (REG_P (static_chain_rtx)) 198 if (REG_P (chain))
194 use_reg (call_fusage, static_chain_rtx); 199 use_reg (call_fusage, chain);
195 } 200 }
196 201
197 return funexp; 202 return funexp;
198 } 203 }
199 204
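Note on this hunk: 4.5 drops the fixed static_chain_rtx global; prepare_call_address now receives the callee's FNDECL and asks the new targetm.calls.static_chain hook for the chain register, so a target can pick it per function. A minimal sketch of what such a hook can look like (hypothetical example_static_chain; the signature matches the call above, the register number is invented):

    static rtx
    example_static_chain (const_tree fndecl ATTRIBUTE_UNUSED, bool incoming_p)
    {
      /* A target with distinct incoming/outgoing chain registers would
         test incoming_p here; this sketch uses one register for both.  */
      return gen_rtx_REG (Pmode, 11);  /* register number is made up */
    }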
236 241
237 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that 242 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
238 denote registers used by the called function. */ 243 denote registers used by the called function. */
239 244
240 static void 245 static void
241 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED, 246 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
242 tree funtype ATTRIBUTE_UNUSED, 247 tree funtype ATTRIBUTE_UNUSED,
243 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, 248 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
244 HOST_WIDE_INT rounded_stack_size, 249 HOST_WIDE_INT rounded_stack_size,
245 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, 250 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
246 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, 251 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
249 { 254 {
250 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size); 255 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
251 rtx call_insn; 256 rtx call_insn;
252 int already_popped = 0; 257 int already_popped = 0;
253 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size); 258 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
254 #if defined (HAVE_call) && defined (HAVE_call_value)
255 rtx struct_value_size_rtx;
256 struct_value_size_rtx = GEN_INT (struct_value_size);
257 #endif
258 259
259 #ifdef CALL_POPS_ARGS 260 #ifdef CALL_POPS_ARGS
260 n_popped += CALL_POPS_ARGS (* args_so_far); 261 n_popped += CALL_POPS_ARGS (* args_so_far);
261 #endif 262 #endif
262 263
334 rounded_stack_size_rtx, 335 rounded_stack_size_rtx,
335 next_arg_reg, NULL_RTX)); 336 next_arg_reg, NULL_RTX));
336 else 337 else
337 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp), 338 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
338 rounded_stack_size_rtx, next_arg_reg, 339 rounded_stack_size_rtx, next_arg_reg,
339 struct_value_size_rtx)); 340 GEN_INT (struct_value_size)));
340 } 341 }
341 else 342 else
342 #endif 343 #endif
343 344
344 #if defined (HAVE_call) && defined (HAVE_call_value) 345 #if defined (HAVE_call) && defined (HAVE_call_value)
350 rounded_stack_size_rtx, next_arg_reg, 351 rounded_stack_size_rtx, next_arg_reg,
351 NULL_RTX)); 352 NULL_RTX));
352 else 353 else
353 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp), 354 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
354 rounded_stack_size_rtx, next_arg_reg, 355 rounded_stack_size_rtx, next_arg_reg,
355 struct_value_size_rtx)); 356 GEN_INT (struct_value_size)));
356 } 357 }
357 else 358 else
358 #endif 359 #endif
359 gcc_unreachable (); 360 gcc_unreachable ();
360 361
374 375
375 /* If this is a const call, then set the insn's unchanging bit. */ 376 /* If this is a const call, then set the insn's unchanging bit. */
376 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE) 377 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
377 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1; 378 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
378 379
379 /* If this call can't throw, attach a REG_EH_REGION reg note to that 380 /* Create a nothrow REG_EH_REGION note, if needed. */
380 effect. */ 381 make_reg_eh_region_note (call_insn, ecf_flags, 0);
381 if (ecf_flags & ECF_NOTHROW)
382 add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
383 else
384 {
385 int rn = lookup_expr_eh_region (fntree);
386
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
389 if (rn > 0)
390 add_reg_note (call_insn, REG_EH_REGION, GEN_INT (rn));
391 }
392 382
393 if (ecf_flags & ECF_NORETURN) 383 if (ecf_flags & ECF_NORETURN)
394 add_reg_note (call_insn, REG_NORETURN, const0_rtx); 384 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
395 385
396 if (ecf_flags & ECF_RETURNS_TWICE) 386 if (ecf_flags & ECF_RETURNS_TWICE)
398 add_reg_note (call_insn, REG_SETJMP, const0_rtx); 388 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
399 cfun->calls_setjmp = 1; 389 cfun->calls_setjmp = 1;
400 } 390 }
401 391
402 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0); 392 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
393
394 /* Record debug information for virtual calls. */
395 if (flag_enable_icf_debug && fndecl == NULL)
396 (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
397 INSN_UID (call_insn));
403 398
404 /* Restore this now, so that we do defer pops for this call's args 399 /* Restore this now, so that we do defer pops for this call's args
405 if the context of the call as a whole permits. */ 400 if the context of the call as a whole permits. */
406 inhibit_defer_pop = old_inhibit_defer_pop; 401 inhibit_defer_pop = old_inhibit_defer_pop;
407 402
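Two notes on this hunk. The open-coded REG_EH_REGION logic (const0_rtx for nothrow calls, otherwise the looked-up EH region number) moves behind make_reg_eh_region_note, part of 4.5's reworked exception handling; its body is roughly the sketch below (an approximation, not the exact except.c source):

    void
    make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
    {
      rtx value;

      if (ecf_flags & ECF_NOTHROW)
        value = const0_rtx;       /* the call cannot throw */
      else if (lp_nr != 0)
        value = GEN_INT (lp_nr);  /* landing-pad / region number */
      else
        return;                   /* no note needed */

      add_reg_note (insn, REG_EH_REGION, value);
    }

The new debug_hooks->virtual_call_token call is also what the #include "debug.h" added at the top of the file is for: when flag_enable_icf_debug is set and the callee is not known by name, it records a token tying the indirect call insn to its CALL_EXPR for the debug-info hooks.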
588 583
589 int 584 int
590 flags_from_decl_or_type (const_tree exp) 585 flags_from_decl_or_type (const_tree exp)
591 { 586 {
592 int flags = 0; 587 int flags = 0;
593 const_tree type = exp;
594 588
595 if (DECL_P (exp)) 589 if (DECL_P (exp))
596 { 590 {
597 type = TREE_TYPE (exp);
598
599 /* The function exp may have the `malloc' attribute. */ 591 /* The function exp may have the `malloc' attribute. */
600 if (DECL_IS_MALLOC (exp)) 592 if (DECL_IS_MALLOC (exp))
601 flags |= ECF_MALLOC; 593 flags |= ECF_MALLOC;
602 594
603 /* The function exp may have the `returns_twice' attribute. */ 595 /* The function exp may have the `returns_twice' attribute. */
905 } 897 }
906 } 898 }
907 } 899 }
908 900
909 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in 901 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
910 CALL_EXPR EXP. 902 CALL_EXPR EXP.
911 903
912 NUM_ACTUALS is the total number of parameters. 904 NUM_ACTUALS is the total number of parameters.
913 905
914 N_NAMED_ARGS is the total number of named arguments. 906 N_NAMED_ARGS is the total number of named arguments.
915 907
947 int reg_parm_stack_space, 939 int reg_parm_stack_space,
948 rtx *old_stack_level, int *old_pending_adj, 940 rtx *old_stack_level, int *old_pending_adj,
949 int *must_preallocate, int *ecf_flags, 941 int *must_preallocate, int *ecf_flags,
950 bool *may_tailcall, bool call_from_thunk_p) 942 bool *may_tailcall, bool call_from_thunk_p)
951 { 943 {
944 location_t loc = EXPR_LOCATION (exp);
952 /* 1 if scanning parms front to back, -1 if scanning back to front. */ 945 /* 1 if scanning parms front to back, -1 if scanning back to front. */
953 int inc; 946 int inc;
954 947
955 /* Count arg position in order args appear. */ 948 /* Count arg position in order args appear. */
956 int argpos; 949 int argpos;
1052 instead of making a copy. */ 1045 instead of making a copy. */
1053 if (call_from_thunk_p 1046 if (call_from_thunk_p
1054 || (callee_copies 1047 || (callee_copies
1055 && !TREE_ADDRESSABLE (type) 1048 && !TREE_ADDRESSABLE (type)
1056 && (base = get_base_address (args[i].tree_value)) 1049 && (base = get_base_address (args[i].tree_value))
1050 && TREE_CODE (base) != SSA_NAME
1057 && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) 1051 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1058 { 1052 {
1059 /* We can't use sibcalls if a callee-copied argument is 1053 /* We can't use sibcalls if a callee-copied argument is
1060 stored in the current function's frame. */ 1054 stored in the current function's frame. */
1061 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) 1055 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1062 *may_tailcall = false; 1056 *may_tailcall = false;
1063 1057
1064 args[i].tree_value = build_fold_addr_expr (args[i].tree_value); 1058 args[i].tree_value = build_fold_addr_expr_loc (loc,
1059 args[i].tree_value);
1065 type = TREE_TYPE (args[i].tree_value); 1060 type = TREE_TYPE (args[i].tree_value);
1066 1061
1067 if (*ecf_flags & ECF_CONST) 1062 if (*ecf_flags & ECF_CONST)
1068 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); 1063 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1069 } 1064 }
1111 1106
1112 if (!callee_copies && *ecf_flags & ECF_PURE) 1107 if (!callee_copies && *ecf_flags & ECF_PURE)
1113 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); 1108 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1114 1109
1115 args[i].tree_value 1110 args[i].tree_value
1116 = build_fold_addr_expr (make_tree (type, copy)); 1111 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1117 type = TREE_TYPE (args[i].tree_value); 1112 type = TREE_TYPE (args[i].tree_value);
1118 *may_tailcall = false; 1113 *may_tailcall = false;
1119 } 1114 }
1120 } 1115 }
1121 1116
1122 mode = TYPE_MODE (type);
1123 unsignedp = TYPE_UNSIGNED (type); 1117 unsignedp = TYPE_UNSIGNED (type);
1124 1118 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1125 if (targetm.calls.promote_function_args (fndecl 1119 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1126 ? TREE_TYPE (fndecl)
1127 : fntype))
1128 mode = promote_mode (type, mode, &unsignedp, 1);
1129 1120
1130 args[i].unsignedp = unsignedp; 1121 args[i].unsignedp = unsignedp;
1131 args[i].mode = mode; 1122 args[i].mode = mode;
1132 1123
1133 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type, 1124 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
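This is the central API change of the 4.4-to-4.5 update and it recurs throughout the file: the two-step pattern "ask targetm.calls.promote_function_args (or promote_function_return), then call promote_mode" collapses into the single hook promote_function_mode, whose last argument selects argument promotion (0) versus return-value promotion (1). Side by side, taken from this hunk:

    /* 4.4: one hook decides whether to promote, promote_mode computes.  */
    mode = TYPE_MODE (type);
    if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl)
                                                    : fntype))
      mode = promote_mode (type, mode, &unsignedp, 1);

    /* 4.5: one call decides and computes; trailing 0 = argument.  */
    mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                  fndecl ? TREE_TYPE (fndecl) : fntype, 0);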
1303 if (!ACCUMULATE_OUTGOING_ARGS) 1294 if (!ACCUMULATE_OUTGOING_ARGS)
1304 return; 1295 return;
1305 1296
1306 for (i = 0; i < num_actuals; i++) 1297 for (i = 0; i < num_actuals; i++)
1307 { 1298 {
1299 tree type;
1308 enum machine_mode mode; 1300 enum machine_mode mode;
1309 1301
1310 if (TREE_CODE (args[i].tree_value) != CALL_EXPR) 1302 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1311 continue; 1303 continue;
1312 1304
1313 /* If this is an addressable type, we cannot pre-evaluate it. */ 1305 /* If this is an addressable type, we cannot pre-evaluate it. */
1314 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))); 1306 type = TREE_TYPE (args[i].tree_value);
1307 gcc_assert (!TREE_ADDRESSABLE (type));
1315 1308
1316 args[i].initial_value = args[i].value 1309 args[i].initial_value = args[i].value
1317 = expand_normal (args[i].tree_value); 1310 = expand_normal (args[i].tree_value);
1318 1311
1319 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value)); 1312 mode = TYPE_MODE (type);
1320 if (mode != args[i].mode) 1313 if (mode != args[i].mode)
1321 { 1314 {
1315 int unsignedp = args[i].unsignedp;
1322 args[i].value 1316 args[i].value
1323 = convert_modes (args[i].mode, mode, 1317 = convert_modes (args[i].mode, mode,
1324 args[i].value, args[i].unsignedp); 1318 args[i].value, args[i].unsignedp);
1325 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE) 1319
1326 /* CSE will replace this only if it contains args[i].value 1320 /* CSE will replace this only if it contains args[i].value
1327 pseudo, so convert it down to the declared mode using 1321 pseudo, so convert it down to the declared mode using
1328 a SUBREG. */ 1322 a SUBREG. */
1329 if (REG_P (args[i].value) 1323 if (REG_P (args[i].value)
1330 && GET_MODE_CLASS (args[i].mode) == MODE_INT) 1324 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1325 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1331 { 1326 {
1332 args[i].initial_value 1327 args[i].initial_value
1333 = gen_lowpart_SUBREG (mode, args[i].value); 1328 = gen_lowpart_SUBREG (mode, args[i].value);
1334 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; 1329 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1335 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value, 1330 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1336 args[i].unsignedp); 1331 args[i].unsignedp);
1337 } 1332 }
1338 #endif
1339 } 1333 }
1340 } 1334 }
1341 } 1335 }
1342 1336
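A related cleanup above: the promoted-SUBREG code used to be compiled in only under #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE); 4.5 compiles it unconditionally and instead checks at run time whether this particular argument was actually promoted. Note that promote_mode itself lost its trailing for_call flag in 4.5, as the new condition shows:

    /* Build the promoted SUBREG only when promotion changed the mode.  */
    if (REG_P (args[i].value)
        && GET_MODE_CLASS (args[i].mode) == MODE_INT
        && promote_mode (type, mode, &unsignedp) != args[i].mode)
      {
        /* ... wrap args[i].value in a SUBREG_PROMOTED_VAR_P subreg ... */
      }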
1343 /* Given the current state of MUST_PREALLOCATE and information about 1337 /* Given the current state of MUST_PREALLOCATE and information about
1344 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE, 1338 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1345 compute and return the final value for MUST_PREALLOCATE. */ 1339 compute and return the final value for MUST_PREALLOCATE. */
1346 1340
1347 static int 1341 static int
1348 finalize_must_preallocate (int must_preallocate, int num_actuals, 1342 finalize_must_preallocate (int must_preallocate, int num_actuals,
1349 struct arg_data *args, struct args_size *args_size) 1343 struct arg_data *args, struct args_size *args_size)
1350 { 1344 {
1351 /* See if we have or want to preallocate stack space. 1345 /* See if we have or want to preallocate stack space.
1352 1346
1353 If we would have to push a partially-in-regs parm 1347 If we would have to push a partially-in-regs parm
1428 if (! args[i].pass_on_stack 1422 if (! args[i].pass_on_stack
1429 && args[i].reg != 0 1423 && args[i].reg != 0
1430 && args[i].partial == 0) 1424 && args[i].partial == 0)
1431 continue; 1425 continue;
1432 1426
1433 if (GET_CODE (offset) == CONST_INT) 1427 if (CONST_INT_P (offset))
1434 addr = plus_constant (arg_reg, INTVAL (offset)); 1428 addr = plus_constant (arg_reg, INTVAL (offset));
1435 else 1429 else
1436 addr = gen_rtx_PLUS (Pmode, arg_reg, offset); 1430 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1437 1431
1438 addr = plus_constant (addr, arg_offset); 1432 addr = plus_constant (addr, arg_offset);
1455 } 1449 }
1456 align = BITS_PER_UNIT; 1450 align = BITS_PER_UNIT;
1457 boundary = args[i].locate.boundary; 1451 boundary = args[i].locate.boundary;
1458 if (args[i].locate.where_pad != downward) 1452 if (args[i].locate.where_pad != downward)
1459 align = boundary; 1453 align = boundary;
1460 else if (GET_CODE (offset) == CONST_INT) 1454 else if (CONST_INT_P (offset))
1461 { 1455 {
1462 align = INTVAL (offset) * BITS_PER_UNIT | boundary; 1456 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1463 align = align & -align; 1457 align = align & -align;
1464 } 1458 }
1465 set_mem_align (args[i].stack, align); 1459 set_mem_align (args[i].stack, align);
1466 1460
1467 if (GET_CODE (slot_offset) == CONST_INT) 1461 if (CONST_INT_P (slot_offset))
1468 addr = plus_constant (arg_reg, INTVAL (slot_offset)); 1462 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1469 else 1463 else
1470 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset); 1464 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1471 1465
1472 addr = plus_constant (addr, arg_offset); 1466 addr = plus_constant (addr, arg_offset);
1546 1540
1547 if (addr == crtl->args.internal_arg_pointer) 1541 if (addr == crtl->args.internal_arg_pointer)
1548 i = 0; 1542 i = 0;
1549 else if (GET_CODE (addr) == PLUS 1543 else if (GET_CODE (addr) == PLUS
1550 && XEXP (addr, 0) == crtl->args.internal_arg_pointer 1544 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1551 && GET_CODE (XEXP (addr, 1)) == CONST_INT) 1545 && CONST_INT_P (XEXP (addr, 1)))
1552 i = INTVAL (XEXP (addr, 1)); 1546 i = INTVAL (XEXP (addr, 1));
1553 /* Return true for arg pointer based indexed addressing. */ 1547 /* Return true for arg pointer based indexed addressing. */
1554 else if (GET_CODE (addr) == PLUS 1548 else if (GET_CODE (addr) == PLUS
1555 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer 1549 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1556 || XEXP (addr, 1) == crtl->args.internal_arg_pointer)) 1550 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
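The CONST_INT_P substitutions in the hunks above (and again near the end of the file) are mechanical; the macro comes from rtl.h and changes nothing at run time:

    /* From rtl.h.  */
    #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)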
1923 /* Sequence of insns to perform a tail "call". */ 1917 /* Sequence of insns to perform a tail "call". */
1924 rtx tail_call_insns = NULL_RTX; 1918 rtx tail_call_insns = NULL_RTX;
1925 /* Data type of the function. */ 1919 /* Data type of the function. */
1926 tree funtype; 1920 tree funtype;
1927 tree type_arg_types; 1921 tree type_arg_types;
1922 tree rettype;
1928 /* Declaration of the function being called, 1923 /* Declaration of the function being called,
1929 or 0 if the function is computed (not known by name). */ 1924 or 0 if the function is computed (not known by name). */
1930 tree fndecl = 0; 1925 tree fndecl = 0;
1931 /* The type of the function being called. */ 1926 /* The type of the function being called. */
1932 tree fntype; 1927 tree fntype;
2018 allocate_dynamic_stack_space. This modifies the stack_pointer_delta, 2013 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2019 which we then also need to save/restore along the way. */ 2014 which we then also need to save/restore along the way. */
2020 int old_stack_pointer_delta = 0; 2015 int old_stack_pointer_delta = 0;
2021 2016
2022 rtx call_fusage; 2017 rtx call_fusage;
2023 tree p = CALL_EXPR_FN (exp);
2024 tree addr = CALL_EXPR_FN (exp); 2018 tree addr = CALL_EXPR_FN (exp);
2025 int i; 2019 int i;
2026 /* The alignment of the stack, in bits. */ 2020 /* The alignment of the stack, in bits. */
2027 unsigned HOST_WIDE_INT preferred_stack_boundary; 2021 unsigned HOST_WIDE_INT preferred_stack_boundary;
2028 /* The alignment of the stack, in bytes. */ 2022 /* The alignment of the stack, in bytes. */
2041 fntype = TREE_TYPE (fndecl); 2035 fntype = TREE_TYPE (fndecl);
2042 flags |= flags_from_decl_or_type (fndecl); 2036 flags |= flags_from_decl_or_type (fndecl);
2043 } 2037 }
2044 else 2038 else
2045 { 2039 {
2046 fntype = TREE_TYPE (TREE_TYPE (p)); 2040 fntype = TREE_TYPE (TREE_TYPE (addr));
2047 flags |= flags_from_decl_or_type (fntype); 2041 flags |= flags_from_decl_or_type (fntype);
2048 } 2042 }
2043 rettype = TREE_TYPE (exp);
2049 2044
2050 struct_value = targetm.calls.struct_value_rtx (fntype, 0); 2045 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2051 2046
2052 /* Warn if this value is an aggregate type, 2047 /* Warn if this value is an aggregate type,
2053 regardless of which calling convention we are using for it. */ 2048 regardless of which calling convention we are using for it. */
2054 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))) 2049 if (AGGREGATE_TYPE_P (rettype))
2055 warning (OPT_Waggregate_return, "function call has aggregate value"); 2050 warning (OPT_Waggregate_return, "function call has aggregate value");
2056 2051
2057 /* If the result of a non looping pure or const function call is 2052 /* If the result of a non looping pure or const function call is
2058 ignored (or void), and none of its arguments are volatile, we can 2053 ignored (or void), and none of its arguments are volatile, we can
2059 avoid expanding the call and just evaluate the arguments for 2054 avoid expanding the call and just evaluate the arguments for
2060 side-effects. */ 2055 side-effects. */
2061 if ((flags & (ECF_CONST | ECF_PURE)) 2056 if ((flags & (ECF_CONST | ECF_PURE))
2062 && (!(flags & ECF_LOOPING_CONST_OR_PURE)) 2057 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2063 && (ignore || target == const0_rtx 2058 && (ignore || target == const0_rtx
2064 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)) 2059 || TYPE_MODE (rettype) == VOIDmode))
2065 { 2060 {
2066 bool volatilep = false; 2061 bool volatilep = false;
2067 tree arg; 2062 tree arg;
2068 call_expr_arg_iterator iter; 2063 call_expr_arg_iterator iter;
2069 2064
2102 { 2097 {
2103 pcc_struct_value = 1; 2098 pcc_struct_value = 1;
2104 } 2099 }
2105 #else /* not PCC_STATIC_STRUCT_RETURN */ 2100 #else /* not PCC_STATIC_STRUCT_RETURN */
2106 { 2101 {
2107 struct_value_size = int_size_in_bytes (TREE_TYPE (exp)); 2102 struct_value_size = int_size_in_bytes (rettype);
2108 2103
2109 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp)) 2104 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2110 structure_value_addr = XEXP (target, 0); 2105 structure_value_addr = XEXP (target, 0);
2111 else 2106 else
2112 { 2107 {
2113 /* For variable-sized objects, we must be called with a target 2108 /* For variable-sized objects, we must be called with a target
2114 specified. If we were to allocate space on the stack here, 2109 specified. If we were to allocate space on the stack here,
2115 we would have no way of knowing when to free it. */ 2110 we would have no way of knowing when to free it. */
2116 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1); 2111 rtx d = assign_temp (rettype, 0, 1, 1);
2117 2112
2118 mark_temp_addr_taken (d); 2113 mark_temp_addr_taken (d);
2119 structure_value_addr = XEXP (d, 0); 2114 structure_value_addr = XEXP (d, 0);
2120 target = 0; 2115 target = 0;
2121 } 2116 }
2282 there's cleanups, as we know there's code to follow the call. */ 2277 there's cleanups, as we know there's code to follow the call. */
2283 2278
2284 if (currently_expanding_call++ != 0 2279 if (currently_expanding_call++ != 0
2285 || !flag_optimize_sibling_calls 2280 || !flag_optimize_sibling_calls
2286 || args_size.var 2281 || args_size.var
2287 || lookup_expr_eh_region (exp) >= 0
2288 || dbg_cnt (tail_call) == false) 2282 || dbg_cnt (tail_call) == false)
2289 try_tail_call = 0; 2283 try_tail_call = 0;
2290 2284
2291 /* Rest of purposes for tail call optimizations to fail. */ 2285 /* Rest of purposes for tail call optimizations to fail. */
2292 if ( 2286 if (
2341 enum machine_mode callee_mode, callee_promoted_mode; 2335 enum machine_mode callee_mode, callee_promoted_mode;
2342 int caller_unsignedp, callee_unsignedp; 2336 int caller_unsignedp, callee_unsignedp;
2343 tree caller_res = DECL_RESULT (current_function_decl); 2337 tree caller_res = DECL_RESULT (current_function_decl);
2344 2338
2345 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); 2339 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2346 caller_mode = caller_promoted_mode = DECL_MODE (caller_res); 2340 caller_mode = DECL_MODE (caller_res);
2347 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); 2341 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2348 callee_mode = callee_promoted_mode = TYPE_MODE (TREE_TYPE (funtype)); 2342 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2349 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl))) 2343 caller_promoted_mode
2350 caller_promoted_mode 2344 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2351 = promote_mode (TREE_TYPE (caller_res), caller_mode, 2345 &caller_unsignedp,
2352 &caller_unsignedp, 1); 2346 TREE_TYPE (current_function_decl), 1);
2353 if (targetm.calls.promote_function_return (funtype)) 2347 callee_promoted_mode
2354 callee_promoted_mode 2348 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2355 = promote_mode (TREE_TYPE (funtype), callee_mode, 2349 &callee_unsignedp,
2356 &callee_unsignedp, 1); 2350 funtype, 1);
2357 if (caller_mode != VOIDmode 2351 if (caller_mode != VOIDmode
2358 && (caller_promoted_mode != callee_promoted_mode 2352 && (caller_promoted_mode != callee_promoted_mode
2359 || ((caller_mode != caller_promoted_mode 2353 || ((caller_mode != caller_promoted_mode
2360 || callee_mode != callee_promoted_mode) 2354 || callee_mode != callee_promoted_mode)
2361 && (caller_unsignedp != callee_unsignedp 2355 && (caller_unsignedp != callee_unsignedp
2689 2683
2690 funexp = rtx_for_function_call (fndecl, addr); 2684 funexp = rtx_for_function_call (fndecl, addr);
2691 2685
2692 /* Figure out the register where the value, if any, will come back. */ 2686 /* Figure out the register where the value, if any, will come back. */
2693 valreg = 0; 2687 valreg = 0;
2694 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode 2688 if (TYPE_MODE (rettype) != VOIDmode
2695 && ! structure_value_addr) 2689 && ! structure_value_addr)
2696 { 2690 {
2697 if (pcc_struct_value) 2691 if (pcc_struct_value)
2698 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)), 2692 valreg = hard_function_value (build_pointer_type (rettype),
2699 fndecl, NULL, (pass == 0)); 2693 fndecl, NULL, (pass == 0));
2700 else 2694 else
2701 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype, 2695 valreg = hard_function_value (rettype, fndecl, fntype,
2702 (pass == 0)); 2696 (pass == 0));
2703 2697
2704 /* If VALREG is a PARALLEL whose first member has a zero 2698 /* If VALREG is a PARALLEL whose first member has a zero
2705 offset, use that. This is for targets such as m68k that 2699 offset, use that. This is for targets such as m68k that
2706 return the same value in multiple places. */ 2700 return the same value in multiple places. */
2814 if (REG_P (struct_value)) 2808 if (REG_P (struct_value))
2815 use_reg (&call_fusage, struct_value); 2809 use_reg (&call_fusage, struct_value);
2816 } 2810 }
2817 2811
2818 after_args = get_last_insn (); 2812 after_args = get_last_insn ();
2819 funexp = prepare_call_address (funexp, static_chain_value, 2813 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2820 &call_fusage, reg_parm_seen, pass == 0); 2814 &call_fusage, reg_parm_seen, pass == 0);
2821 2815
2822 load_register_parameters (args, num_actuals, &call_fusage, flags, 2816 load_register_parameters (args, num_actuals, &call_fusage, flags,
2823 pass == 0, &sibcall_failure); 2817 pass == 0, &sibcall_failure);
2824 2818
2861 of a register, shift the register right by the appropriate amount 2855 of a register, shift the register right by the appropriate amount
2862 and update VALREG accordingly. BLKmode values are handled by the 2856 and update VALREG accordingly. BLKmode values are handled by the
2863 group load/store machinery below. */ 2857 group load/store machinery below. */
2864 if (!structure_value_addr 2858 if (!structure_value_addr
2865 && !pcc_struct_value 2859 && !pcc_struct_value
2866 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 2860 && TYPE_MODE (rettype) != BLKmode
2867 && targetm.calls.return_in_msb (TREE_TYPE (exp))) 2861 && targetm.calls.return_in_msb (rettype))
2868 { 2862 {
2869 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg)) 2863 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
2870 sibcall_failure = 1; 2864 sibcall_failure = 1;
2871 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg)); 2865 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
2872 } 2866 }
2873 2867
2874 if (pass && (flags & ECF_MALLOC)) 2868 if (pass && (flags & ECF_MALLOC))
2875 { 2869 {
2876 rtx temp = gen_reg_rtx (GET_MODE (valreg)); 2870 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2877 rtx last, insns; 2871 rtx last, insns;
2878 2872
2879 /* The return value from a malloc-like function is a pointer. */ 2873 /* The return value from a malloc-like function is a pointer. */
2880 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE) 2874 if (TREE_CODE (rettype) == POINTER_TYPE)
2881 mark_reg_pointer (temp, BIGGEST_ALIGNMENT); 2875 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2882 2876
2883 emit_move_insn (temp, valreg); 2877 emit_move_insn (temp, valreg);
2884 2878
2885 /* The return value from a malloc-like function can not alias 2879 /* The return value from a malloc-like function can not alias
2925 } 2919 }
2926 } 2920 }
2927 2921
2928 /* If value type not void, return an rtx for the value. */ 2922 /* If value type not void, return an rtx for the value. */
2929 2923
2930 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode 2924 if (TYPE_MODE (rettype) == VOIDmode
2931 || ignore) 2925 || ignore)
2932 target = const0_rtx; 2926 target = const0_rtx;
2933 else if (structure_value_addr) 2927 else if (structure_value_addr)
2934 { 2928 {
2935 if (target == 0 || !MEM_P (target)) 2929 if (target == 0 || !MEM_P (target))
2936 { 2930 {
2937 target 2931 target
2938 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), 2932 = gen_rtx_MEM (TYPE_MODE (rettype),
2939 memory_address (TYPE_MODE (TREE_TYPE (exp)), 2933 memory_address (TYPE_MODE (rettype),
2940 structure_value_addr)); 2934 structure_value_addr));
2941 set_mem_attributes (target, exp, 1); 2935 set_mem_attributes (target, rettype, 1);
2942 } 2936 }
2943 } 2937 }
2944 else if (pcc_struct_value) 2938 else if (pcc_struct_value)
2945 { 2939 {
2946 /* This is the special C++ case where we need to 2940 /* This is the special C++ case where we need to
2947 know what the true target was. We take care to 2941 know what the true target was. We take care to
2948 never use this value more than once in one expression. */ 2942 never use this value more than once in one expression. */
2949 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), 2943 target = gen_rtx_MEM (TYPE_MODE (rettype),
2950 copy_to_reg (valreg)); 2944 copy_to_reg (valreg));
2951 set_mem_attributes (target, exp, 1); 2945 set_mem_attributes (target, rettype, 1);
2952 } 2946 }
2953 /* Handle calls that return values in multiple non-contiguous locations. 2947 /* Handle calls that return values in multiple non-contiguous locations.
2954 The Irix 6 ABI has examples of this. */ 2948 The Irix 6 ABI has examples of this. */
2955 else if (GET_CODE (valreg) == PARALLEL) 2949 else if (GET_CODE (valreg) == PARALLEL)
2956 { 2950 {
2957 if (target == 0) 2951 if (target == 0)
2958 { 2952 {
2959 /* This will only be assigned once, so it can be readonly. */ 2953 /* This will only be assigned once, so it can be readonly. */
2960 tree nt = build_qualified_type (TREE_TYPE (exp), 2954 tree nt = build_qualified_type (rettype,
2961 (TYPE_QUALS (TREE_TYPE (exp)) 2955 (TYPE_QUALS (rettype)
2962 | TYPE_QUAL_CONST)); 2956 | TYPE_QUAL_CONST));
2963 2957
2964 target = assign_temp (nt, 0, 1, 1); 2958 target = assign_temp (nt, 0, 1, 1);
2965 } 2959 }
2966 2960
2967 if (! rtx_equal_p (target, valreg)) 2961 if (! rtx_equal_p (target, valreg))
2968 emit_group_store (target, valreg, TREE_TYPE (exp), 2962 emit_group_store (target, valreg, rettype,
2969 int_size_in_bytes (TREE_TYPE (exp))); 2963 int_size_in_bytes (rettype));
2970 2964
2971 /* We can not support sibling calls for this case. */ 2965 /* We can not support sibling calls for this case. */
2972 sibcall_failure = 1; 2966 sibcall_failure = 1;
2973 } 2967 }
2974 else if (target 2968 else if (target
2975 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)) 2969 && GET_MODE (target) == TYPE_MODE (rettype)
2976 && GET_MODE (target) == GET_MODE (valreg)) 2970 && GET_MODE (target) == GET_MODE (valreg))
2977 { 2971 {
2978 bool may_overlap = false; 2972 bool may_overlap = false;
2979 2973
2980 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard 2974 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3015 optimization cannot be performed in that case. */ 3009 optimization cannot be performed in that case. */
3016 if (MEM_P (target)) 3010 if (MEM_P (target))
3017 sibcall_failure = 1; 3011 sibcall_failure = 1;
3018 } 3012 }
3019 } 3013 }
3020 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) 3014 else if (TYPE_MODE (rettype) == BLKmode)
3021 { 3015 {
3022 rtx val = valreg; 3016 rtx val = valreg;
3023 if (GET_MODE (val) != BLKmode) 3017 if (GET_MODE (val) != BLKmode)
3024 val = avoid_likely_spilled_reg (val); 3018 val = avoid_likely_spilled_reg (val);
3025 target = copy_blkmode_from_reg (target, val, TREE_TYPE (exp)); 3019 target = copy_blkmode_from_reg (target, val, rettype);
3026 3020
3027 /* We can not support sibling calls for this case. */ 3021 /* We can not support sibling calls for this case. */
3028 sibcall_failure = 1; 3022 sibcall_failure = 1;
3029 } 3023 }
3030 else 3024 else
3031 target = copy_to_reg (avoid_likely_spilled_reg (valreg)); 3025 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3032 3026
3033 if (targetm.calls.promote_function_return(funtype)) 3027 /* If we promoted this return value, make the proper SUBREG.
3034 { 3028 TARGET might be const0_rtx here, so be careful. */
3035 /* If we promoted this return value, make the proper SUBREG. 3029 if (REG_P (target)
3036 TARGET might be const0_rtx here, so be careful. */ 3030 && TYPE_MODE (rettype) != BLKmode
3037 if (REG_P (target) 3031 && GET_MODE (target) != TYPE_MODE (rettype))
3038 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 3032 {
3039 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) 3033 tree type = rettype;
3034 int unsignedp = TYPE_UNSIGNED (type);
3035 int offset = 0;
3036 enum machine_mode pmode;
3037
3038 /* Ensure we promote as expected, and get the new unsignedness. */
3039 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3040 funtype, 1);
3041 gcc_assert (GET_MODE (target) == pmode);
3042
3043 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3044 && (GET_MODE_SIZE (GET_MODE (target))
3045 > GET_MODE_SIZE (TYPE_MODE (type))))
3040 { 3046 {
3041 tree type = TREE_TYPE (exp); 3047 offset = GET_MODE_SIZE (GET_MODE (target))
3042 int unsignedp = TYPE_UNSIGNED (type); 3048 - GET_MODE_SIZE (TYPE_MODE (type));
3043 int offset = 0; 3049 if (! BYTES_BIG_ENDIAN)
3044 enum machine_mode pmode; 3050 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3045 3051 else if (! WORDS_BIG_ENDIAN)
3046 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1); 3052 offset %= UNITS_PER_WORD;
3047 /* If we don't promote as expected, something is wrong. */
3048 gcc_assert (GET_MODE (target) == pmode);
3049
3050 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3051 && (GET_MODE_SIZE (GET_MODE (target))
3052 > GET_MODE_SIZE (TYPE_MODE (type))))
3053 {
3054 offset = GET_MODE_SIZE (GET_MODE (target))
3055 - GET_MODE_SIZE (TYPE_MODE (type));
3056 if (! BYTES_BIG_ENDIAN)
3057 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3058 else if (! WORDS_BIG_ENDIAN)
3059 offset %= UNITS_PER_WORD;
3060 }
3061 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3062 SUBREG_PROMOTED_VAR_P (target) = 1;
3063 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3064 } 3053 }
3054
3055 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3056 SUBREG_PROMOTED_VAR_P (target) = 1;
3057 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3065 } 3058 }
3066 3059
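The restructuring above removes the promote_function_return guard: the promoted-SUBREG path now runs whenever the register's mode differs from the declared return type's mode, and promote_function_mode is asserted to reproduce that mode. The SUBREG it builds is the standard RTL idiom for "the inner register already holds a correctly extended value"; later passes test it roughly like this (sketch):

    if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x))
      {
        /* SUBREG_PROMOTED_UNSIGNED_P (x) says whether the recorded
           extension of the inner register was zero- or sign-extend,
           so a matching explicit extension can be dropped.  */
      }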
3067 /* If size of args is variable or this was a constructor call for a stack 3060 /* If size of args is variable or this was a constructor call for a stack
3068 argument, restore saved stack-pointer value. */ 3061 argument, restore saved stack-pointer value. */
3069 3062
3445 } 3438 }
3446 3439
3447 for (; count < nargs; count++) 3440 for (; count < nargs; count++)
3448 { 3441 {
3449 rtx val = va_arg (p, rtx); 3442 rtx val = va_arg (p, rtx);
3450 enum machine_mode mode = va_arg (p, enum machine_mode); 3443 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3451 3444
3452 /* We cannot convert the arg value to the mode the library wants here; 3445 /* We cannot convert the arg value to the mode the library wants here;
3453 must do it earlier where we know the signedness of the arg. */ 3446 must do it earlier where we know the signedness of the arg. */
3454 gcc_assert (mode != BLKmode 3447 gcc_assert (mode != BLKmode
3455 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); 3448 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
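The (enum machine_mode) va_arg (p, int) change above fixes a real portability hazard, not just style: an enum whose values all fit in int travels through "..." as int under the default argument promotions, so reading it back with va_arg (p, enum machine_mode) is only safe when the enum's implementation-defined underlying type happens to be int. Portable code reads int and casts. A self-contained illustration (standalone C, nothing GCC-specific; names are invented):

    #include <stdarg.h>
    #include <stdio.h>

    enum mode { M_QI, M_HI, M_SI };

    /* Enums pass through "..." as int, so read int and cast back.  */
    static void
    show_modes (int n, ...)
    {
      va_list ap;
      int i;

      va_start (ap, n);
      for (i = 0; i < n; i++)
        {
          enum mode m = (enum mode) va_arg (ap, int);
          printf ("mode %d\n", (int) m);
        }
      va_end (ap);
    }

    int
    main (void)
    {
      show_modes (2, M_QI, M_SI);
      return 0;
    }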
3635 { 3628 {
3636 enum machine_mode mode = argvec[argnum].mode; 3629 enum machine_mode mode = argvec[argnum].mode;
3637 rtx val = argvec[argnum].value; 3630 rtx val = argvec[argnum].value;
3638 rtx reg = argvec[argnum].reg; 3631 rtx reg = argvec[argnum].reg;
3639 int partial = argvec[argnum].partial; 3632 int partial = argvec[argnum].partial;
3633 unsigned int parm_align = argvec[argnum].locate.boundary;
3640 int lower_bound = 0, upper_bound = 0, i; 3634 int lower_bound = 0, upper_bound = 0, i;
3641 3635
3642 if (! (reg != 0 && partial == 0)) 3636 if (! (reg != 0 && partial == 0))
3643 { 3637 {
3644 if (ACCUMULATE_OUTGOING_ARGS) 3638 if (ACCUMULATE_OUTGOING_ARGS)
3696 emit_move_insn (argvec[argnum].save_area, stack_area); 3690 emit_move_insn (argvec[argnum].save_area, stack_area);
3697 } 3691 }
3698 } 3692 }
3699 } 3693 }
3700 3694
3701 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY, 3695 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3702 partial, reg, 0, argblock, 3696 partial, reg, 0, argblock,
3703 GEN_INT (argvec[argnum].locate.offset.constant), 3697 GEN_INT (argvec[argnum].locate.offset.constant),
3704 reg_parm_stack_space, 3698 reg_parm_stack_space,
3705 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); 3699 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3706 3700
3745 if (PUSH_ARGS_REVERSED) 3739 if (PUSH_ARGS_REVERSED)
3746 argnum = nargs - 1; 3740 argnum = nargs - 1;
3747 else 3741 else
3748 argnum = 0; 3742 argnum = 0;
3749 3743
3750 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0); 3744 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3751 3745
3752 /* Now load any reg parms into their regs. */ 3746 /* Now load any reg parms into their regs. */
3753 3747
3754 /* ARGNUM indexes the ARGVEC array in the order in which the arguments 3748 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3755 are to be pushed. */ 3749 are to be pushed. */
3804 3798
3805 /* Don't allow popping to be deferred, since then 3799 /* Don't allow popping to be deferred, since then
3806 cse'ing of library calls could delete a call and leave the pop. */ 3800 cse'ing of library calls could delete a call and leave the pop. */
3807 NO_DEFER_POP; 3801 NO_DEFER_POP;
3808 valreg = (mem_value == 0 && outmode != VOIDmode 3802 valreg = (mem_value == 0 && outmode != VOIDmode
3809 ? hard_libcall_value (outmode) : NULL_RTX); 3803 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3810 3804
3811 /* Stack must be properly aligned now. */ 3805 /* Stack must be properly aligned now. */
3812 gcc_assert (!(stack_pointer_delta 3806 gcc_assert (!(stack_pointer_delta
3813 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); 3807 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3814 3808
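hard_libcall_value now also receives the libcall's symbol (orgfun): 4.5 converts the old LIBCALL_VALUE target macro into a hook that may inspect which library function is being called. The expected shape is roughly (a sketch, not checked against the 4.5 explow.c source):

    rtx
    hard_libcall_value (enum machine_mode mode, rtx fun)
    {
      return targetm.calls.libcall_value (mode, fun);
    }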
3873 value = gen_reg_rtx (outmode); 3867 value = gen_reg_rtx (outmode);
3874 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); 3868 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3875 } 3869 }
3876 else 3870 else
3877 { 3871 {
3878 /* Convert to the proper mode if PROMOTE_MODE has been active. */ 3872 /* Convert to the proper mode if a promotion has been active. */
3879 if (GET_MODE (valreg) != outmode) 3873 if (GET_MODE (valreg) != outmode)
3880 { 3874 {
3881 int unsignedp = TYPE_UNSIGNED (tfom); 3875 int unsignedp = TYPE_UNSIGNED (tfom);
3882 3876
3883 gcc_assert (targetm.calls.promote_function_return (tfom)); 3877 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
3884 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0) 3878 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
3885 == GET_MODE (valreg)); 3879 == GET_MODE (valreg));
3886
3887 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); 3880 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3888 } 3881 }
3889 3882
3890 if (value != 0) 3883 if (value != 0)
3891 emit_move_insn (value, valreg); 3884 emit_move_insn (value, valreg);
4233 for BLKmode is careful to avoid it. */ 4226 for BLKmode is careful to avoid it. */
4234 excess = (arg->locate.size.constant 4227 excess = (arg->locate.size.constant
4235 - int_size_in_bytes (TREE_TYPE (pval)) 4228 - int_size_in_bytes (TREE_TYPE (pval))
4236 + partial); 4229 + partial);
4237 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), 4230 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4238 NULL_RTX, TYPE_MODE (sizetype), 0); 4231 NULL_RTX, TYPE_MODE (sizetype),
4232 EXPAND_NORMAL);
4239 } 4233 }
4240 4234
4241 parm_align = arg->locate.boundary; 4235 parm_align = arg->locate.boundary;
4242 4236
4243 /* When an argument is padded down, the block is aligned to 4237 /* When an argument is padded down, the block is aligned to
4262 4256
4263 if (XEXP (x, 0) == crtl->args.internal_arg_pointer 4257 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4264 || (GET_CODE (XEXP (x, 0)) == PLUS 4258 || (GET_CODE (XEXP (x, 0)) == PLUS
4265 && XEXP (XEXP (x, 0), 0) == 4259 && XEXP (XEXP (x, 0), 0) ==
4266 crtl->args.internal_arg_pointer 4260 crtl->args.internal_arg_pointer
4267 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)) 4261 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4268 { 4262 {
4269 if (XEXP (x, 0) != crtl->args.internal_arg_pointer) 4263 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4270 i = INTVAL (XEXP (XEXP (x, 0), 1)); 4264 i = INTVAL (XEXP (XEXP (x, 0), 1));
4271 4265
4272 /* expand_call should ensure this. */ 4266 /* expand_call should ensure this. */
4273 gcc_assert (!arg->locate.offset.var 4267 gcc_assert (!arg->locate.offset.var
4274 && arg->locate.size.var == 0 4268 && arg->locate.size.var == 0
4275 && GET_CODE (size_rtx) == CONST_INT); 4269 && CONST_INT_P (size_rtx));
4276 4270
4277 if (arg->locate.offset.constant > i) 4271 if (arg->locate.offset.constant > i)
4278 { 4272 {
4279 if (arg->locate.offset.constant < i + INTVAL (size_rtx)) 4273 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4280 sibcall_failure = 1; 4274 sibcall_failure = 1;