comparison gcc/calls.c @ 57:326d9e06c2e3

modify c-parser.c
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 15 Feb 2010 00:54:17 +0900
parents 27e6f95b2c21 77e2b8dfacca
children 60c1b2f8487a
comparing 54:f62c169bbc24 with 57:326d9e06c2e3
37 #include "tm_p.h" 37 #include "tm_p.h"
38 #include "timevar.h" 38 #include "timevar.h"
39 #include "sbitmap.h" 39 #include "sbitmap.h"
40 #include "langhooks.h" 40 #include "langhooks.h"
41 #include "target.h" 41 #include "target.h"
42 #include "debug.h"
42 #include "cgraph.h" 43 #include "cgraph.h"
43 #include "except.h" 44 #include "except.h"
44 #include "dbgcnt.h" 45 #include "dbgcnt.h"
45 #include "tree-flow.h" 46 #include "tree-flow.h"
46 47
167 168
168 CALL_FUSAGE points to a variable holding the prospective 169 CALL_FUSAGE points to a variable holding the prospective
169 CALL_INSN_FUNCTION_USAGE information. */ 170 CALL_INSN_FUNCTION_USAGE information. */
170 171
171 rtx 172 rtx
172 prepare_call_address (rtx funexp, rtx static_chain_value, 173 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
173 rtx *call_fusage, int reg_parm_seen, int sibcallp) 174 rtx *call_fusage, int reg_parm_seen, int sibcallp)
174 { 175 {
175 /* Make a valid memory address and copy constants through pseudo-regs, 176 /* Make a valid memory address and copy constants through pseudo-regs,
176 but not for a constant address if -fno-function-cse. */ 177 but not for a constant address if -fno-function-cse. */
177 if (GET_CODE (funexp) != SYMBOL_REF) 178 if (GET_CODE (funexp) != SYMBOL_REF)
188 #endif 189 #endif
189 } 190 }
190 191
191 if (static_chain_value != 0) 192 if (static_chain_value != 0)
192 { 193 {
194 rtx chain;
195
196 gcc_assert (fndecl);
197 chain = targetm.calls.static_chain (fndecl, false);
193 static_chain_value = convert_memory_address (Pmode, static_chain_value); 198 static_chain_value = convert_memory_address (Pmode, static_chain_value);
194 emit_move_insn (static_chain_rtx, static_chain_value); 199
195 200 emit_move_insn (chain, static_chain_value);
196 if (REG_P (static_chain_rtx)) 201 if (REG_P (chain))
197 use_reg (call_fusage, static_chain_rtx); 202 use_reg (call_fusage, chain);
198 } 203 }
199 204
200 return funexp; 205 return funexp;
201 } 206 }
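
Note: the rewritten hunk stops hard-coding static_chain_rtx and instead asks the target where the chain for FNDECL lives (in this revision the hook is targetm.calls.static_chain, taking the decl and an incoming/outgoing flag). Below is a minimal standalone sketch of that hook-indirection pattern; all names and register numbers are illustrative, not GCC's.

/* Standalone sketch: the caller no longer assumes one fixed chain
   register; a per-target callback picks it.  Illustrative only. */
#include <stdio.h>

struct chain_loc { int regno; };

/* Hypothetical analogue of targetm.calls.static_chain (fndecl,
   incoming_p): outgoing chain in r10, incoming chain in r11.  */
static struct chain_loc
default_static_chain (const char *fndecl, int incoming_p)
{
  (void) fndecl;                 /* a real target may key on the decl */
  struct chain_loc loc = { incoming_p ? 11 : 10 };
  return loc;
}

struct target_calls
{
  struct chain_loc (*static_chain) (const char *, int);
};

static const struct target_calls targetm_calls = { default_static_chain };

int
main (void)
{
  struct chain_loc chain = targetm_calls.static_chain ("nested_fn", 0);
  printf ("outgoing static chain: r%d\n", chain.regno);
  return 0;
}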
202 207
239 244
240 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that 245 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
241 denote registers used by the called function. */ 246 denote registers used by the called function. */
242 247
243 static void 248 static void
244 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED, 249 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
245 tree funtype ATTRIBUTE_UNUSED, 250 tree funtype ATTRIBUTE_UNUSED,
246 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, 251 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
247 HOST_WIDE_INT rounded_stack_size, 252 HOST_WIDE_INT rounded_stack_size,
248 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, 253 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
249 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, 254 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
252 { 257 {
253 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size); 258 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
254 rtx call_insn; 259 rtx call_insn;
255 int already_popped = 0; 260 int already_popped = 0;
256 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size); 261 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
257 #if defined (HAVE_call) && defined (HAVE_call_value)
258 rtx struct_value_size_rtx;
259 struct_value_size_rtx = GEN_INT (struct_value_size);
260 #endif
261 262
262 #ifdef CALL_POPS_ARGS 263 #ifdef CALL_POPS_ARGS
263 n_popped += CALL_POPS_ARGS (* args_so_far); 264 n_popped += CALL_POPS_ARGS (* args_so_far);
264 #endif 265 #endif
265 266
337 rounded_stack_size_rtx, 338 rounded_stack_size_rtx,
338 next_arg_reg, NULL_RTX)); 339 next_arg_reg, NULL_RTX));
339 else 340 else
340 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp), 341 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
341 rounded_stack_size_rtx, next_arg_reg, 342 rounded_stack_size_rtx, next_arg_reg,
342 struct_value_size_rtx)); 343 GEN_INT (struct_value_size)));
343 } 344 }
344 else 345 else
345 #endif 346 #endif
346 347
347 #if defined (HAVE_call) && defined (HAVE_call_value) 348 #if defined (HAVE_call) && defined (HAVE_call_value)
353 rounded_stack_size_rtx, next_arg_reg, 354 rounded_stack_size_rtx, next_arg_reg,
354 NULL_RTX)); 355 NULL_RTX));
355 else 356 else
356 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp), 357 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
357 rounded_stack_size_rtx, next_arg_reg, 358 rounded_stack_size_rtx, next_arg_reg,
358 struct_value_size_rtx)); 359 GEN_INT (struct_value_size)));
359 } 360 }
360 else 361 else
361 #endif 362 #endif
362 gcc_unreachable (); 363 gcc_unreachable ();
363 364
377 378
378 /* If this is a const call, then set the insn's unchanging bit. */ 379 /* If this is a const call, then set the insn's unchanging bit. */
379 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE) 380 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
380 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1; 381 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
381 382
382 /* If this call can't throw, attach a REG_EH_REGION reg note to that 383 /* Create a nothrow REG_EH_REGION note, if needed. */
383 effect. */ 384 make_reg_eh_region_note (call_insn, ecf_flags, 0);
384 if (ecf_flags & ECF_NOTHROW)
385 add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
386 else
387 {
388 int rn = lookup_expr_eh_region (fntree);
389
390 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
391 throw, which we already took care of. */
392 if (rn > 0)
393 add_reg_note (call_insn, REG_EH_REGION, GEN_INT (rn));
394 }
395 385
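Note: the open-coded REG_EH_REGION logic on the left is folded into the new make_reg_eh_region_note helper on the right. A standalone sketch of the decision being subsumed follows; the flag value and region numbering are assumptions, simplified from the deleted branch.

/* Sketch of what the removed branch decided: a nothrow call gets an
   explicit "region 0" note, a call inside a known EH region gets that
   region's number, and anything else gets no note.  Flag bit is
   illustrative, not GCC's definition. */
#include <stdio.h>

#define ECF_NOTHROW 0x40

static void
note_eh_region (int ecf_flags, int region)
{
  if (ecf_flags & ECF_NOTHROW)
    printf ("REG_EH_REGION 0 (cannot throw)\n");
  else if (region > 0)
    printf ("REG_EH_REGION %d\n", region);
  else
    printf ("no note: not in an EH region\n");
}

int
main (void)
{
  note_eh_region (ECF_NOTHROW, 7);  /* nothrow wins */
  note_eh_region (0, 7);            /* inside region 7 */
  note_eh_region (0, -1);           /* rn < 0: nothing to record */
  return 0;
}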
396 if (ecf_flags & ECF_NORETURN) 386 if (ecf_flags & ECF_NORETURN)
397 add_reg_note (call_insn, REG_NORETURN, const0_rtx); 387 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
398 388
399 if (ecf_flags & ECF_RETURNS_TWICE) 389 if (ecf_flags & ECF_RETURNS_TWICE)
401 add_reg_note (call_insn, REG_SETJMP, const0_rtx); 391 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
402 cfun->calls_setjmp = 1; 392 cfun->calls_setjmp = 1;
403 } 393 }
404 394
405 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0); 395 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
396
397 /* Record debug information for virtual calls. */
398 if (flag_enable_icf_debug && fndecl == NULL)
399 (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
400 INSN_UID (call_insn));
406 401
407 /* Restore this now, so that we do defer pops for this call's args 402 /* Restore this now, so that we do defer pops for this call's args
408 if the context of the call as a whole permits. */ 403 if the context of the call as a whole permits. */
409 inhibit_defer_pop = old_inhibit_defer_pop; 404 inhibit_defer_pop = old_inhibit_defer_pop;
410 405
591 586
592 int 587 int
593 flags_from_decl_or_type (const_tree exp) 588 flags_from_decl_or_type (const_tree exp)
594 { 589 {
595 int flags = 0; 590 int flags = 0;
596 const_tree type = exp;
597 591
598 if (DECL_P (exp)) 592 if (DECL_P (exp))
599 { 593 {
600 type = TREE_TYPE (exp);
601
602 /* The function exp may have the `malloc' attribute. */ 594 /* The function exp may have the `malloc' attribute. */
603 if (DECL_IS_MALLOC (exp)) 595 if (DECL_IS_MALLOC (exp))
604 flags |= ECF_MALLOC; 596 flags |= ECF_MALLOC;
605 597
606 /* The function exp may have the `returns_twice' attribute. */ 598 /* The function exp may have the `returns_twice' attribute. */
908 } 900 }
909 } 901 }
910 } 902 }
911 903
912 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in 904 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
913 CALL_EXPR EXP. 905 CALL_EXPR EXP.
914 906
915 NUM_ACTUALS is the total number of parameters. 907 NUM_ACTUALS is the total number of parameters.
916 908
917 N_NAMED_ARGS is the total number of named arguments. 909 N_NAMED_ARGS is the total number of named arguments.
918 910
950 int reg_parm_stack_space, 942 int reg_parm_stack_space,
951 rtx *old_stack_level, int *old_pending_adj, 943 rtx *old_stack_level, int *old_pending_adj,
952 int *must_preallocate, int *ecf_flags, 944 int *must_preallocate, int *ecf_flags,
953 bool *may_tailcall, bool call_from_thunk_p) 945 bool *may_tailcall, bool call_from_thunk_p)
954 { 946 {
947 location_t loc = EXPR_LOCATION (exp);
955 /* 1 if scanning parms front to back, -1 if scanning back to front. */ 948 /* 1 if scanning parms front to back, -1 if scanning back to front. */
956 int inc; 949 int inc;
957 950
958 /* Count arg position in order args appear. */ 951 /* Count arg position in order args appear. */
959 int argpos; 952 int argpos;
1055 instead of making a copy. */ 1048 instead of making a copy. */
1056 if (call_from_thunk_p 1049 if (call_from_thunk_p
1057 || (callee_copies 1050 || (callee_copies
1058 && !TREE_ADDRESSABLE (type) 1051 && !TREE_ADDRESSABLE (type)
1059 && (base = get_base_address (args[i].tree_value)) 1052 && (base = get_base_address (args[i].tree_value))
1053 && TREE_CODE (base) != SSA_NAME
1060 && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) 1054 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1061 { 1055 {
1062 /* We can't use sibcalls if a callee-copied argument is 1056 /* We can't use sibcalls if a callee-copied argument is
1063 stored in the current function's frame. */ 1057 stored in the current function's frame. */
1064 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) 1058 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1065 *may_tailcall = false; 1059 *may_tailcall = false;
1066 1060
1067 args[i].tree_value = build_fold_addr_expr (args[i].tree_value); 1061 args[i].tree_value = build_fold_addr_expr_loc (loc,
1062 args[i].tree_value);
1068 type = TREE_TYPE (args[i].tree_value); 1063 type = TREE_TYPE (args[i].tree_value);
1069 1064
1070 if (*ecf_flags & ECF_CONST) 1065 if (*ecf_flags & ECF_CONST)
1071 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); 1066 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1072 } 1067 }
1114 1109
1115 if (!callee_copies && *ecf_flags & ECF_PURE) 1110 if (!callee_copies && *ecf_flags & ECF_PURE)
1116 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); 1111 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1117 1112
1118 args[i].tree_value 1113 args[i].tree_value
1119 = build_fold_addr_expr (make_tree (type, copy)); 1114 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1120 type = TREE_TYPE (args[i].tree_value); 1115 type = TREE_TYPE (args[i].tree_value);
1121 *may_tailcall = false; 1116 *may_tailcall = false;
1122 } 1117 }
1123 } 1118 }
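
Note: both ADDR_EXPR builders gain a _loc suffix and an explicit location argument, so the folded trees keep the call expression's source position. A toy sketch of threading a location through a builder; the types here are invented for illustration.

/* Toy sketch of location threading: the builder records where the
   expression came from instead of synthesizing a node with no
   position.  All types invented. */
#include <stdio.h>

typedef struct { const char *file; int line; } location_t;
typedef struct { const char *op; location_t loc; } expr;

static expr
build_addr_expr_loc (location_t loc, const char *operand)
{
  expr e = { operand, loc };  /* folded node inherits the call's position */
  return e;
}

int
main (void)
{
  location_t loc = { "caller.c", 42 };
  expr e = build_addr_expr_loc (loc, "arg");
  printf ("&%s built at %s:%d\n", e.op, e.loc.file, e.loc.line);
  return 0;
}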
1124 1119
1125 mode = TYPE_MODE (type);
1126 unsignedp = TYPE_UNSIGNED (type); 1120 unsignedp = TYPE_UNSIGNED (type);
1127 1121 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1128 if (targetm.calls.promote_function_args (fndecl 1122 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1129 ? TREE_TYPE (fndecl)
1130 : fntype))
1131 mode = promote_mode (type, mode, &unsignedp, 1);
1132 1123
1133 args[i].unsignedp = unsignedp; 1124 args[i].unsignedp = unsignedp;
1134 args[i].mode = mode; 1125 args[i].mode = mode;
1135 1126
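Note: the promote_function_args hook plus promote_mode pair collapses into a single promote_function_mode call that receives the function type directly. A standalone sketch of what argument-mode promotion computes; the word size is an assumption.

/* Sketch: sub-word integer arguments are widened to the word mode for
   the call ABI; wider modes pass through unchanged.  Sizes
   illustrative. */
#include <stdio.h>

#define WORD_BITS 32

static int
promote_function_mode_bits (int type_bits, int *unsignedp)
{
  (void) unsignedp;       /* a target could also flip signedness here */
  if (type_bits < WORD_BITS)
    return WORD_BITS;
  return type_bits;
}

int
main (void)
{
  int uns = 0;
  printf ("char  (8)  -> %d bits\n", promote_function_mode_bits (8, &uns));
  printf ("short (16) -> %d bits\n", promote_function_mode_bits (16, &uns));
  printf ("long  (64) -> %d bits\n", promote_function_mode_bits (64, &uns));
  return 0;
}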
1136 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type, 1127 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1306 if (!ACCUMULATE_OUTGOING_ARGS) 1297 if (!ACCUMULATE_OUTGOING_ARGS)
1307 return; 1298 return;
1308 1299
1309 for (i = 0; i < num_actuals; i++) 1300 for (i = 0; i < num_actuals; i++)
1310 { 1301 {
1302 tree type;
1311 enum machine_mode mode; 1303 enum machine_mode mode;
1312 1304
1313 if (TREE_CODE (args[i].tree_value) != CALL_EXPR) 1305 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1314 continue; 1306 continue;
1315 1307
1316 /* If this is an addressable type, we cannot pre-evaluate it. */ 1308 /* If this is an addressable type, we cannot pre-evaluate it. */
1317 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))); 1309 type = TREE_TYPE (args[i].tree_value);
1310 gcc_assert (!TREE_ADDRESSABLE (type));
1318 1311
1319 args[i].initial_value = args[i].value 1312 args[i].initial_value = args[i].value
1320 = expand_normal (args[i].tree_value); 1313 = expand_normal (args[i].tree_value);
1321 1314
1322 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value)); 1315 mode = TYPE_MODE (type);
1323 if (mode != args[i].mode) 1316 if (mode != args[i].mode)
1324 { 1317 {
1318 int unsignedp = args[i].unsignedp;
1325 args[i].value 1319 args[i].value
1326 = convert_modes (args[i].mode, mode, 1320 = convert_modes (args[i].mode, mode,
1327 args[i].value, args[i].unsignedp); 1321 args[i].value, args[i].unsignedp);
1328 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE) 1322
1329 /* CSE will replace this only if it contains args[i].value 1323 /* CSE will replace this only if it contains args[i].value
1330 pseudo, so convert it down to the declared mode using 1324 pseudo, so convert it down to the declared mode using
1331 a SUBREG. */ 1325 a SUBREG. */
1332 if (REG_P (args[i].value) 1326 if (REG_P (args[i].value)
1333 && GET_MODE_CLASS (args[i].mode) == MODE_INT) 1327 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1328 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1334 { 1329 {
1335 args[i].initial_value 1330 args[i].initial_value
1336 = gen_lowpart_SUBREG (mode, args[i].value); 1331 = gen_lowpart_SUBREG (mode, args[i].value);
1337 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; 1332 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1338 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value, 1333 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1339 args[i].unsignedp); 1334 args[i].unsignedp);
1340 } 1335 }
1341 #endif
1342 } 1336 }
1343 } 1337 }
1344 } 1338 }
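Note: the compile-time #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE) guard becomes the runtime test promote_mode (...) != args[i].mode, so the promoted SUBREG is created only when the two promotion rules actually diverge for this argument. A sketch of the shape of that change; both predicates are invented.

/* Sketch: a preprocessor guard that baked one global answer in at
   build time becomes a per-argument runtime comparison. */
#include <stdio.h>

static int function_promoted_bits (int bits) { return bits < 32 ? 32 : bits; }
static int general_promoted_bits  (int bits) { return bits; /* no PROMOTE_MODE */ }

int
main (void)
{
  int bits = 16;
  /* Old shape: decided once by #ifdef.  New shape: compare the two
     promotions for this very argument.  */
  if (general_promoted_bits (bits) != function_promoted_bits (bits))
    printf ("mark value as promoted (SUBREG_PROMOTED_VAR_P)\n");
  else
    printf ("modes agree; no promoted subreg needed\n");
  return 0;
}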
1345 1339
1346 /* Given the current state of MUST_PREALLOCATE and information about 1340 /* Given the current state of MUST_PREALLOCATE and information about
1347 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE, 1341 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1348 compute and return the final value for MUST_PREALLOCATE. */ 1342 compute and return the final value for MUST_PREALLOCATE. */
1349 1343
1350 static int 1344 static int
1351 finalize_must_preallocate (int must_preallocate, int num_actuals, 1345 finalize_must_preallocate (int must_preallocate, int num_actuals,
1352 struct arg_data *args, struct args_size *args_size) 1346 struct arg_data *args, struct args_size *args_size)
1353 { 1347 {
1354 /* See if we have or want to preallocate stack space. 1348 /* See if we have or want to preallocate stack space.
1355 1349
1356 If we would have to push a partially-in-regs parm 1350 If we would have to push a partially-in-regs parm
1431 if (! args[i].pass_on_stack 1425 if (! args[i].pass_on_stack
1432 && args[i].reg != 0 1426 && args[i].reg != 0
1433 && args[i].partial == 0) 1427 && args[i].partial == 0)
1434 continue; 1428 continue;
1435 1429
1436 if (GET_CODE (offset) == CONST_INT) 1430 if (CONST_INT_P (offset))
1437 addr = plus_constant (arg_reg, INTVAL (offset)); 1431 addr = plus_constant (arg_reg, INTVAL (offset));
1438 else 1432 else
1439 addr = gen_rtx_PLUS (Pmode, arg_reg, offset); 1433 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1440 1434
1441 addr = plus_constant (addr, arg_offset); 1435 addr = plus_constant (addr, arg_offset);
1458 } 1452 }
1459 align = BITS_PER_UNIT; 1453 align = BITS_PER_UNIT;
1460 boundary = args[i].locate.boundary; 1454 boundary = args[i].locate.boundary;
1461 if (args[i].locate.where_pad != downward) 1455 if (args[i].locate.where_pad != downward)
1462 align = boundary; 1456 align = boundary;
1463 else if (GET_CODE (offset) == CONST_INT) 1457 else if (CONST_INT_P (offset))
1464 { 1458 {
1465 align = INTVAL (offset) * BITS_PER_UNIT | boundary; 1459 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1466 align = align & -align; 1460 align = align & -align;
1467 } 1461 }
1468 set_mem_align (args[i].stack, align); 1462 set_mem_align (args[i].stack, align);
1469 1463
1470 if (GET_CODE (slot_offset) == CONST_INT) 1464 if (CONST_INT_P (slot_offset))
1471 addr = plus_constant (arg_reg, INTVAL (slot_offset)); 1465 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1472 else 1466 else
1473 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset); 1467 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1474 1468
1475 addr = plus_constant (addr, arg_offset); 1469 addr = plus_constant (addr, arg_offset);
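Note: the repeated GET_CODE (x) == CONST_INT tests become the CONST_INT_P predicate, a thin macro over the same comparison, so this hunk is purely a readability change. Sketch of the pattern; the types are illustrative stand-ins for GCC's rtx.

/* Sketch of the predicate-macro idiom: wrap a code comparison in a
   named test so call sites read as a question. */
#include <stdio.h>

enum rtx_code { CONST_INT, PLUS, REG };
struct rtx_def { enum rtx_code code; long val; };

#define GET_CODE(X)    ((X)->code)
#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)  /* mirrors rtl.h */

int
main (void)
{
  struct rtx_def five = { CONST_INT, 5 };
  struct rtx_def sum  = { PLUS, 0 };
  printf ("five: %d, sum: %d\n", CONST_INT_P (&five), CONST_INT_P (&sum));
  return 0;
}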
1549 1543
1550 if (addr == crtl->args.internal_arg_pointer) 1544 if (addr == crtl->args.internal_arg_pointer)
1551 i = 0; 1545 i = 0;
1552 else if (GET_CODE (addr) == PLUS 1546 else if (GET_CODE (addr) == PLUS
1553 && XEXP (addr, 0) == crtl->args.internal_arg_pointer 1547 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1554 && GET_CODE (XEXP (addr, 1)) == CONST_INT) 1548 && CONST_INT_P (XEXP (addr, 1)))
1555 i = INTVAL (XEXP (addr, 1)); 1549 i = INTVAL (XEXP (addr, 1));
1556 /* Return true for arg pointer based indexed addressing. */ 1550 /* Return true for arg pointer based indexed addressing. */
1557 else if (GET_CODE (addr) == PLUS 1551 else if (GET_CODE (addr) == PLUS
1558 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer 1552 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1559 || XEXP (addr, 1) == crtl->args.internal_arg_pointer)) 1553 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1931 /* Sequence of insns to perform a tail "call". */ 1925 /* Sequence of insns to perform a tail "call". */
1932 rtx tail_call_insns = NULL_RTX; 1926 rtx tail_call_insns = NULL_RTX;
1933 /* Data type of the function. */ 1927 /* Data type of the function. */
1934 tree funtype; 1928 tree funtype;
1935 tree type_arg_types; 1929 tree type_arg_types;
1930 tree rettype;
1936 /* Declaration of the function being called, 1931 /* Declaration of the function being called,
1937 or 0 if the function is computed (not known by name). */ 1932 or 0 if the function is computed (not known by name). */
1938 tree fndecl = 0; 1933 tree fndecl = 0;
1939 /* The type of the function being called. */ 1934 /* The type of the function being called. */
1940 tree fntype; 1935 tree fntype;
2026 allocate_dynamic_stack_space. This modifies the stack_pointer_delta, 2021 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2027 which we then also need to save/restore along the way. */ 2022 which we then also need to save/restore along the way. */
2028 int old_stack_pointer_delta = 0; 2023 int old_stack_pointer_delta = 0;
2029 2024
2030 rtx call_fusage; 2025 rtx call_fusage;
2031 tree p = CALL_EXPR_FN (exp);
2032 tree addr = CALL_EXPR_FN (exp); 2026 tree addr = CALL_EXPR_FN (exp);
2033 int i; 2027 int i;
2034 /* The alignment of the stack, in bits. */ 2028 /* The alignment of the stack, in bits. */
2035 unsigned HOST_WIDE_INT preferred_stack_boundary; 2029 unsigned HOST_WIDE_INT preferred_stack_boundary;
2036 /* The alignment of the stack, in bytes. */ 2030 /* The alignment of the stack, in bytes. */
2049 fntype = TREE_TYPE (fndecl); 2043 fntype = TREE_TYPE (fndecl);
2050 flags |= flags_from_decl_or_type (fndecl); 2044 flags |= flags_from_decl_or_type (fndecl);
2051 } 2045 }
2052 else 2046 else
2053 { 2047 {
2054 fntype = TREE_TYPE (TREE_TYPE (p)); 2048 fntype = TREE_TYPE (TREE_TYPE (addr));
2055 flags |= flags_from_decl_or_type (fntype); 2049 flags |= flags_from_decl_or_type (fntype);
2056 } 2050 }
2051 rettype = TREE_TYPE (exp);
2057 2052
2058 struct_value = targetm.calls.struct_value_rtx (fntype, 0); 2053 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2059 2054
2060 /* Warn if this value is an aggregate type, 2055 /* Warn if this value is an aggregate type,
2061 regardless of which calling convention we are using for it. */ 2056 regardless of which calling convention we are using for it. */
2062 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))) 2057 if (AGGREGATE_TYPE_P (rettype))
2063 warning (OPT_Waggregate_return, "function call has aggregate value"); 2058 warning (OPT_Waggregate_return, "function call has aggregate value");
2064 2059
2065 /* If the result of a non looping pure or const function call is 2060 /* If the result of a non looping pure or const function call is
2066 ignored (or void), and none of its arguments are volatile, we can 2061 ignored (or void), and none of its arguments are volatile, we can
2067 avoid expanding the call and just evaluate the arguments for 2062 avoid expanding the call and just evaluate the arguments for
2068 side-effects. */ 2063 side-effects. */
2069 if ((flags & (ECF_CONST | ECF_PURE)) 2064 if ((flags & (ECF_CONST | ECF_PURE))
2070 && (!(flags & ECF_LOOPING_CONST_OR_PURE)) 2065 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2071 && (ignore || target == const0_rtx 2066 && (ignore || target == const0_rtx
2072 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)) 2067 || TYPE_MODE (rettype) == VOIDmode))
2073 { 2068 {
2074 bool volatilep = false; 2069 bool volatilep = false;
2075 tree arg; 2070 tree arg;
2076 call_expr_arg_iterator iter; 2071 call_expr_arg_iterator iter;
2077 2072
2110 { 2105 {
2111 pcc_struct_value = 1; 2106 pcc_struct_value = 1;
2112 } 2107 }
2113 #else /* not PCC_STATIC_STRUCT_RETURN */ 2108 #else /* not PCC_STATIC_STRUCT_RETURN */
2114 { 2109 {
2115 struct_value_size = int_size_in_bytes (TREE_TYPE (exp)); 2110 struct_value_size = int_size_in_bytes (rettype);
2116 2111
2117 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp)) 2112 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2118 structure_value_addr = XEXP (target, 0); 2113 structure_value_addr = XEXP (target, 0);
2119 else 2114 else
2120 { 2115 {
2121 /* For variable-sized objects, we must be called with a target 2116 /* For variable-sized objects, we must be called with a target
2122 specified. If we were to allocate space on the stack here, 2117 specified. If we were to allocate space on the stack here,
2123 we would have no way of knowing when to free it. */ 2118 we would have no way of knowing when to free it. */
2124 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1); 2119 rtx d = assign_temp (rettype, 0, 1, 1);
2125 2120
2126 mark_temp_addr_taken (d); 2121 mark_temp_addr_taken (d);
2127 structure_value_addr = XEXP (d, 0); 2122 structure_value_addr = XEXP (d, 0);
2128 target = 0; 2123 target = 0;
2129 } 2124 }
2290 there's cleanups, as we know there's code to follow the call. */ 2285 there's cleanups, as we know there's code to follow the call. */
2291 2286
2292 if (currently_expanding_call++ != 0 2287 if (currently_expanding_call++ != 0
2293 || !flag_optimize_sibling_calls 2288 || !flag_optimize_sibling_calls
2294 || args_size.var 2289 || args_size.var
2295 || lookup_expr_eh_region (exp) >= 0
2296 || dbg_cnt (tail_call) == false) 2290 || dbg_cnt (tail_call) == false)
2297 try_tail_call = 0; 2291 try_tail_call = 0;
2298 2292
2299 /* Rest of purposes for tail call optimizations to fail. */ 2293 /* Rest of purposes for tail call optimizations to fail. */
2300 if ( 2294 if (
2349 enum machine_mode callee_mode, callee_promoted_mode; 2343 enum machine_mode callee_mode, callee_promoted_mode;
2350 int caller_unsignedp, callee_unsignedp; 2344 int caller_unsignedp, callee_unsignedp;
2351 tree caller_res = DECL_RESULT (current_function_decl); 2345 tree caller_res = DECL_RESULT (current_function_decl);
2352 2346
2353 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); 2347 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2354 caller_mode = caller_promoted_mode = DECL_MODE (caller_res); 2348 caller_mode = DECL_MODE (caller_res);
2355 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); 2349 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2356 callee_mode = callee_promoted_mode = TYPE_MODE (TREE_TYPE (funtype)); 2350 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2357 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl))) 2351 caller_promoted_mode
2358 caller_promoted_mode 2352 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2359 = promote_mode (TREE_TYPE (caller_res), caller_mode, 2353 &caller_unsignedp,
2360 &caller_unsignedp, 1); 2354 TREE_TYPE (current_function_decl), 1);
2361 if (targetm.calls.promote_function_return (funtype)) 2355 callee_promoted_mode
2362 callee_promoted_mode 2356 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2363 = promote_mode (TREE_TYPE (funtype), callee_mode, 2357 &callee_unsignedp,
2364 &callee_unsignedp, 1); 2358 funtype, 1);
2365 if (caller_mode != VOIDmode 2359 if (caller_mode != VOIDmode
2366 && (caller_promoted_mode != callee_promoted_mode 2360 && (caller_promoted_mode != callee_promoted_mode
2367 || ((caller_mode != caller_promoted_mode 2361 || ((caller_mode != caller_promoted_mode
2368 || callee_mode != callee_promoted_mode) 2362 || callee_mode != callee_promoted_mode)
2369 && (caller_unsignedp != callee_unsignedp 2363 && (caller_unsignedp != callee_unsignedp
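Note: both the caller's and the callee's return modes are now run through promote_function_mode before comparison, so a sibcall is rejected when the promoted modes disagree, or when promotion happened on either side with mismatched signedness. A standalone sketch of that compatibility test; the bit widths are assumptions.

/* Sketch of the sibcall return-mode check: promote both sides the way
   the ABI would, then require agreement.  Widths illustrative. */
#include <stdio.h>

static int
promoted_bits (int bits, int *unsignedp)
{
  (void) unsignedp;               /* a target could flip this */
  return bits < 32 ? 32 : bits;
}

static int
sibcall_return_compatible (int caller_bits, int caller_uns,
                           int callee_bits, int callee_uns)
{
  int cb = promoted_bits (caller_bits, &caller_uns);
  int kb = promoted_bits (callee_bits, &callee_uns);
  if (cb != kb)
    return 0;
  /* If either side was actually widened, signedness must match too.  */
  if ((caller_bits != cb || callee_bits != kb) && caller_uns != callee_uns)
    return 0;
  return 1;
}

int
main (void)
{
  printf ("%d\n", sibcall_return_compatible (16, 1, 16, 1)); /* ok */
  printf ("%d\n", sibcall_return_compatible (16, 1, 16, 0)); /* rejected */
  return 0;
}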
2756 2750
2757 funexp = rtx_for_function_call (fndecl, addr); 2751 funexp = rtx_for_function_call (fndecl, addr);
2758 2752
2759 /* Figure out the register where the value, if any, will come back. */ 2753 /* Figure out the register where the value, if any, will come back. */
2760 valreg = 0; 2754 valreg = 0;
2761 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode 2755 if (TYPE_MODE (rettype) != VOIDmode
2762 && ! structure_value_addr) 2756 && ! structure_value_addr)
2763 { 2757 {
2764 if (pcc_struct_value) 2758 if (pcc_struct_value)
2765 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)), 2759 valreg = hard_function_value (build_pointer_type (rettype),
2766 fndecl, NULL, (pass == 0)); 2760 fndecl, NULL, (pass == 0));
2767 else 2761 else
2768 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype, 2762 valreg = hard_function_value (rettype, fndecl, fntype,
2769 (pass == 0)); 2763 (pass == 0));
2770 2764
2771 /* If VALREG is a PARALLEL whose first member has a zero 2765 /* If VALREG is a PARALLEL whose first member has a zero
2772 offset, use that. This is for targets such as m68k that 2766 offset, use that. This is for targets such as m68k that
2773 return the same value in multiple places. */ 2767 return the same value in multiple places. */
2881 if (REG_P (struct_value)) 2875 if (REG_P (struct_value))
2882 use_reg (&call_fusage, struct_value); 2876 use_reg (&call_fusage, struct_value);
2883 } 2877 }
2884 2878
2885 after_args = get_last_insn (); 2879 after_args = get_last_insn ();
2886 funexp = prepare_call_address (funexp, static_chain_value, 2880 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2887 &call_fusage, reg_parm_seen, pass == 0); 2881 &call_fusage, reg_parm_seen, pass == 0);
2888 2882
2889 load_register_parameters (args, num_actuals, &call_fusage, flags, 2883 load_register_parameters (args, num_actuals, &call_fusage, flags,
2890 pass == 0, &sibcall_failure); 2884 pass == 0, &sibcall_failure);
2891 2885
2928 of a register, shift the register right by the appropriate amount 2922 of a register, shift the register right by the appropriate amount
2929 and update VALREG accordingly. BLKmode values are handled by the 2923 and update VALREG accordingly. BLKmode values are handled by the
2930 group load/store machinery below. */ 2924 group load/store machinery below. */
2931 if (!structure_value_addr 2925 if (!structure_value_addr
2932 && !pcc_struct_value 2926 && !pcc_struct_value
2933 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 2927 && TYPE_MODE (rettype) != BLKmode
2934 && targetm.calls.return_in_msb (TREE_TYPE (exp))) 2928 && targetm.calls.return_in_msb (rettype))
2935 { 2929 {
2936 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg)) 2930 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
2937 sibcall_failure = 1; 2931 sibcall_failure = 1;
2938 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg)); 2932 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
2939 } 2933 }
2940 2934
2941 if (pass && (flags & ECF_MALLOC)) 2935 if (pass && (flags & ECF_MALLOC))
2942 { 2936 {
2943 rtx temp = gen_reg_rtx (GET_MODE (valreg)); 2937 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2944 rtx last, insns; 2938 rtx last, insns;
2945 2939
2946 /* The return value from a malloc-like function is a pointer. */ 2940 /* The return value from a malloc-like function is a pointer. */
2947 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE) 2941 if (TREE_CODE (rettype) == POINTER_TYPE)
2948 mark_reg_pointer (temp, BIGGEST_ALIGNMENT); 2942 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2949 2943
2950 emit_move_insn (temp, valreg); 2944 emit_move_insn (temp, valreg);
2951 2945
2952 /* The return value from a malloc-like function can not alias 2946 /* The return value from a malloc-like function can not alias
2992 } 2986 }
2993 } 2987 }
2994 2988
2995 /* If value type not void, return an rtx for the value. */ 2989 /* If value type not void, return an rtx for the value. */
2996 2990
2997 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode 2991 if (TYPE_MODE (rettype) == VOIDmode
2998 || ignore) 2992 || ignore)
2999 target = const0_rtx; 2993 target = const0_rtx;
3000 else if (structure_value_addr) 2994 else if (structure_value_addr)
3001 { 2995 {
3002 if (target == 0 || !MEM_P (target)) 2996 if (target == 0 || !MEM_P (target))
3003 { 2997 {
3004 target 2998 target
3005 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), 2999 = gen_rtx_MEM (TYPE_MODE (rettype),
3006 memory_address (TYPE_MODE (TREE_TYPE (exp)), 3000 memory_address (TYPE_MODE (rettype),
3007 structure_value_addr)); 3001 structure_value_addr));
3008 set_mem_attributes (target, exp, 1); 3002 set_mem_attributes (target, rettype, 1);
3009 } 3003 }
3010 } 3004 }
3011 else if (pcc_struct_value) 3005 else if (pcc_struct_value)
3012 { 3006 {
3013 /* This is the special C++ case where we need to 3007 /* This is the special C++ case where we need to
3014 know what the true target was. We take care to 3008 know what the true target was. We take care to
3015 never use this value more than once in one expression. */ 3009 never use this value more than once in one expression. */
3016 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), 3010 target = gen_rtx_MEM (TYPE_MODE (rettype),
3017 copy_to_reg (valreg)); 3011 copy_to_reg (valreg));
3018 set_mem_attributes (target, exp, 1); 3012 set_mem_attributes (target, rettype, 1);
3019 } 3013 }
3020 /* Handle calls that return values in multiple non-contiguous locations. 3014 /* Handle calls that return values in multiple non-contiguous locations.
3021 The Irix 6 ABI has examples of this. */ 3015 The Irix 6 ABI has examples of this. */
3022 else if (GET_CODE (valreg) == PARALLEL) 3016 else if (GET_CODE (valreg) == PARALLEL)
3023 { 3017 {
3024 if (target == 0) 3018 if (target == 0)
3025 { 3019 {
3026 /* This will only be assigned once, so it can be readonly. */ 3020 /* This will only be assigned once, so it can be readonly. */
3027 tree nt = build_qualified_type (TREE_TYPE (exp), 3021 tree nt = build_qualified_type (rettype,
3028 (TYPE_QUALS (TREE_TYPE (exp)) 3022 (TYPE_QUALS (rettype)
3029 | TYPE_QUAL_CONST)); 3023 | TYPE_QUAL_CONST));
3030 3024
3031 target = assign_temp (nt, 0, 1, 1); 3025 target = assign_temp (nt, 0, 1, 1);
3032 } 3026 }
3033 3027
3034 if (! rtx_equal_p (target, valreg)) 3028 if (! rtx_equal_p (target, valreg))
3035 emit_group_store (target, valreg, TREE_TYPE (exp), 3029 emit_group_store (target, valreg, rettype,
3036 int_size_in_bytes (TREE_TYPE (exp))); 3030 int_size_in_bytes (rettype));
3037 3031
3038 /* We can not support sibling calls for this case. */ 3032 /* We can not support sibling calls for this case. */
3039 sibcall_failure = 1; 3033 sibcall_failure = 1;
3040 } 3034 }
3041 else if (target 3035 else if (target
3042 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)) 3036 && GET_MODE (target) == TYPE_MODE (rettype)
3043 && GET_MODE (target) == GET_MODE (valreg)) 3037 && GET_MODE (target) == GET_MODE (valreg))
3044 { 3038 {
3045 bool may_overlap = false; 3039 bool may_overlap = false;
3046 3040
3047 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard 3041 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3082 optimization cannot be performed in that case. */ 3076 optimization cannot be performed in that case. */
3083 if (MEM_P (target)) 3077 if (MEM_P (target))
3084 sibcall_failure = 1; 3078 sibcall_failure = 1;
3085 } 3079 }
3086 } 3080 }
3087 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) 3081 else if (TYPE_MODE (rettype) == BLKmode)
3088 { 3082 {
3089 rtx val = valreg; 3083 rtx val = valreg;
3090 if (GET_MODE (val) != BLKmode) 3084 if (GET_MODE (val) != BLKmode)
3091 val = avoid_likely_spilled_reg (val); 3085 val = avoid_likely_spilled_reg (val);
3092 target = copy_blkmode_from_reg (target, val, TREE_TYPE (exp)); 3086 target = copy_blkmode_from_reg (target, val, rettype);
3093 3087
3094 /* We can not support sibling calls for this case. */ 3088 /* We can not support sibling calls for this case. */
3095 sibcall_failure = 1; 3089 sibcall_failure = 1;
3096 } 3090 }
3097 else 3091 else
3098 target = copy_to_reg (avoid_likely_spilled_reg (valreg)); 3092 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3099 3093
3100 if (targetm.calls.promote_function_return(funtype)) 3094 /* If we promoted this return value, make the proper SUBREG.
3101 { 3095 TARGET might be const0_rtx here, so be careful. */
3102 /* If we promoted this return value, make the proper SUBREG. 3096 if (REG_P (target)
3103 TARGET might be const0_rtx here, so be careful. */ 3097 && TYPE_MODE (rettype) != BLKmode
3104 if (REG_P (target) 3098 && GET_MODE (target) != TYPE_MODE (rettype))
3105 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 3099 {
3106 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) 3100 tree type = rettype;
3101 int unsignedp = TYPE_UNSIGNED (type);
3102 int offset = 0;
3103 enum machine_mode pmode;
3104
3105 /* Ensure we promote as expected, and get the new unsignedness. */
3106 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3107 funtype, 1);
3108 gcc_assert (GET_MODE (target) == pmode);
3109
3110 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3111 && (GET_MODE_SIZE (GET_MODE (target))
3112 > GET_MODE_SIZE (TYPE_MODE (type))))
3107 { 3113 {
3108 tree type = TREE_TYPE (exp); 3114 offset = GET_MODE_SIZE (GET_MODE (target))
3109 int unsignedp = TYPE_UNSIGNED (type); 3115 - GET_MODE_SIZE (TYPE_MODE (type));
3110 int offset = 0; 3116 if (! BYTES_BIG_ENDIAN)
3111 enum machine_mode pmode; 3117 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3112 3118 else if (! WORDS_BIG_ENDIAN)
3113 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1); 3119 offset %= UNITS_PER_WORD;
3114 /* If we don't promote as expected, something is wrong. */
3115 gcc_assert (GET_MODE (target) == pmode);
3116
3117 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3118 && (GET_MODE_SIZE (GET_MODE (target))
3119 > GET_MODE_SIZE (TYPE_MODE (type))))
3120 {
3121 offset = GET_MODE_SIZE (GET_MODE (target))
3122 - GET_MODE_SIZE (TYPE_MODE (type));
3123 if (! BYTES_BIG_ENDIAN)
3124 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3125 else if (! WORDS_BIG_ENDIAN)
3126 offset %= UNITS_PER_WORD;
3127 }
3128 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3129 SUBREG_PROMOTED_VAR_P (target) = 1;
3130 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3131 } 3120 }
3121
3122 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3123 SUBREG_PROMOTED_VAR_P (target) = 1;
3124 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3132 } 3125 }
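Note: with promote_function_mode now called unconditionally, the whole SUBREG block loses one nesting level; its endianness arithmetic is unchanged. That byte-offset computation is worth spelling out; below is a standalone sketch with assumed sizes.

/* Sketch of the subreg offset arithmetic above: when the promoted
   register is wider than the declared type, big-endian layouts put
   the value at the high end, so a byte offset is needed. */
#include <stdio.h>

#define UNITS_PER_WORD 4

static int
promoted_subreg_offset (int target_size, int type_size,
                        int words_big_endian, int bytes_big_endian)
{
  int offset = 0;
  if ((words_big_endian || bytes_big_endian) && target_size > type_size)
    {
      offset = target_size - type_size;
      if (!bytes_big_endian)
        offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
      else if (!words_big_endian)
        offset %= UNITS_PER_WORD;
    }
  return offset;
}

int
main (void)
{
  /* e.g. an 8-byte promoted register holding a 2-byte value */
  printf ("big-endian:    %d\n", promoted_subreg_offset (8, 2, 1, 1));
  printf ("little-endian: %d\n", promoted_subreg_offset (8, 2, 0, 0));
  return 0;
}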
3133 3126
3134 /* If size of args is variable or this was a constructor call for a stack 3127 /* If size of args is variable or this was a constructor call for a stack
3135 argument, restore saved stack-pointer value. */ 3128 argument, restore saved stack-pointer value. */
3136 3129
3512 } 3505 }
3513 3506
3514 for (; count < nargs; count++) 3507 for (; count < nargs; count++)
3515 { 3508 {
3516 rtx val = va_arg (p, rtx); 3509 rtx val = va_arg (p, rtx);
3517 enum machine_mode mode = va_arg (p, enum machine_mode); 3510 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3518 3511
3519 /* We cannot convert the arg value to the mode the library wants here; 3512 /* We cannot convert the arg value to the mode the library wants here;
3520 must do it earlier where we know the signedness of the arg. */ 3513 must do it earlier where we know the signedness of the arg. */
3521 gcc_assert (mode != BLKmode 3514 gcc_assert (mode != BLKmode
3522 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); 3515 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
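Note: va_arg (p, enum machine_mode) becomes (enum machine_mode) va_arg (p, int) because C's default argument promotions pass an enum through varargs as int; reading it back as the enum type is unsafe when the enum's underlying type differs. A standalone demonstration of the portable idiom:

/* Demonstration of the va_arg fix: variadic arguments undergo default
   promotions, so an enum arrives as int and must be read back as int,
   then cast. */
#include <stdarg.h>
#include <stdio.h>

enum machine_mode { VOIDmode, QImode, SImode };

static void
take_modes (int n, ...)
{
  va_list p;
  va_start (p, n);
  while (n-- > 0)
    {
      /* Portable: read the promoted type, then convert.  */
      enum machine_mode mode = (enum machine_mode) va_arg (p, int);
      printf ("mode %d\n", (int) mode);
    }
  va_end (p);
}

int
main (void)
{
  take_modes (2, SImode, QImode);
  return 0;
}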
3702 { 3695 {
3703 enum machine_mode mode = argvec[argnum].mode; 3696 enum machine_mode mode = argvec[argnum].mode;
3704 rtx val = argvec[argnum].value; 3697 rtx val = argvec[argnum].value;
3705 rtx reg = argvec[argnum].reg; 3698 rtx reg = argvec[argnum].reg;
3706 int partial = argvec[argnum].partial; 3699 int partial = argvec[argnum].partial;
3700 unsigned int parm_align = argvec[argnum].locate.boundary;
3707 int lower_bound = 0, upper_bound = 0, i; 3701 int lower_bound = 0, upper_bound = 0, i;
3708 3702
3709 if (! (reg != 0 && partial == 0)) 3703 if (! (reg != 0 && partial == 0))
3710 { 3704 {
3711 if (ACCUMULATE_OUTGOING_ARGS) 3705 if (ACCUMULATE_OUTGOING_ARGS)
3763 emit_move_insn (argvec[argnum].save_area, stack_area); 3757 emit_move_insn (argvec[argnum].save_area, stack_area);
3764 } 3758 }
3765 } 3759 }
3766 } 3760 }
3767 3761
3768 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY, 3762 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3769 partial, reg, 0, argblock, 3763 partial, reg, 0, argblock,
3770 GEN_INT (argvec[argnum].locate.offset.constant), 3764 GEN_INT (argvec[argnum].locate.offset.constant),
3771 reg_parm_stack_space, 3765 reg_parm_stack_space,
3772 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad)); 3766 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3773 3767
3812 if (PUSH_ARGS_REVERSED) 3806 if (PUSH_ARGS_REVERSED)
3813 argnum = nargs - 1; 3807 argnum = nargs - 1;
3814 else 3808 else
3815 argnum = 0; 3809 argnum = 0;
3816 3810
3817 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0); 3811 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3818 3812
3819 /* Now load any reg parms into their regs. */ 3813 /* Now load any reg parms into their regs. */
3820 3814
3821 /* ARGNUM indexes the ARGVEC array in the order in which the arguments 3815 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3822 are to be pushed. */ 3816 are to be pushed. */
3871 3865
3872 /* Don't allow popping to be deferred, since then 3866 /* Don't allow popping to be deferred, since then
3873 cse'ing of library calls could delete a call and leave the pop. */ 3867 cse'ing of library calls could delete a call and leave the pop. */
3874 NO_DEFER_POP; 3868 NO_DEFER_POP;
3875 valreg = (mem_value == 0 && outmode != VOIDmode 3869 valreg = (mem_value == 0 && outmode != VOIDmode
3876 ? hard_libcall_value (outmode) : NULL_RTX); 3870 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3877 3871
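Note: hard_libcall_value now receives the libcall's symbol (orgfun) in addition to the mode, so a target can choose the return register per-function rather than per-mode only. A toy sketch of that signature change; the routine name and register numbers are invented.

/* Toy sketch: return-value location chosen from both the mode and the
   callee, instead of the mode alone. */
#include <stdio.h>
#include <string.h>

static int
hard_libcall_value_reg (int mode_bits, const char *fun)
{
  /* A target might special-case certain runtime routines.  */
  if (strcmp (fun, "__special_helper") == 0)
    return 2;                     /* assumed: value comes back in r2 */
  return mode_bits > 32 ? 1 : 0;  /* otherwise keyed on the mode */
}

int
main (void)
{
  printf ("memcpy: r%d\n", hard_libcall_value_reg (32, "memcpy"));
  printf ("helper: r%d\n", hard_libcall_value_reg (32, "__special_helper"));
  return 0;
}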
3878 /* Stack must be properly aligned now. */ 3872 /* Stack must be properly aligned now. */
3879 gcc_assert (!(stack_pointer_delta 3873 gcc_assert (!(stack_pointer_delta
3880 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); 3874 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3881 3875
3940 value = gen_reg_rtx (outmode); 3934 value = gen_reg_rtx (outmode);
3941 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); 3935 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3942 } 3936 }
3943 else 3937 else
3944 { 3938 {
3945 /* Convert to the proper mode if PROMOTE_MODE has been active. */ 3939 /* Convert to the proper mode if a promotion has been active. */
3946 if (GET_MODE (valreg) != outmode) 3940 if (GET_MODE (valreg) != outmode)
3947 { 3941 {
3948 int unsignedp = TYPE_UNSIGNED (tfom); 3942 int unsignedp = TYPE_UNSIGNED (tfom);
3949 3943
3950 gcc_assert (targetm.calls.promote_function_return (tfom)); 3944 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
3951 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0) 3945 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
3952 == GET_MODE (valreg)); 3946 == GET_MODE (valreg));
3953
3954 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); 3947 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3955 } 3948 }
3956 3949
3957 if (value != 0) 3950 if (value != 0)
3958 emit_move_insn (value, valreg); 3951 emit_move_insn (value, valreg);
4300 for BLKmode is careful to avoid it. */ 4293 for BLKmode is careful to avoid it. */
4301 excess = (arg->locate.size.constant 4294 excess = (arg->locate.size.constant
4302 - int_size_in_bytes (TREE_TYPE (pval)) 4295 - int_size_in_bytes (TREE_TYPE (pval))
4303 + partial); 4296 + partial);
4304 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), 4297 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4305 NULL_RTX, TYPE_MODE (sizetype), 0); 4298 NULL_RTX, TYPE_MODE (sizetype),
4299 EXPAND_NORMAL);
4306 } 4300 }
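Note: the trailing 0 passed to expand_expr is now spelled EXPAND_NORMAL; the enumerator has that value, so behavior is identical and the call site becomes self-documenting. Sketch of the idiom, with an illustrative enum modeled on expr.h's expand_modifier:

/* Sketch of replacing a magic 0 with its named enumerator. */
#include <stdio.h>

enum expand_modifier { EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM };

static void
expand (int modifier)
{
  printf ("modifier = %d\n", modifier);
}

int
main (void)
{
  expand (0);              /* old call: meaning hidden */
  expand (EXPAND_NORMAL);  /* new call: same value, self-documenting */
  return 0;
}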
4307 4301
4308 parm_align = arg->locate.boundary; 4302 parm_align = arg->locate.boundary;
4309 4303
4310 /* When an argument is padded down, the block is aligned to 4304 /* When an argument is padded down, the block is aligned to
4329 4323
4330 if (XEXP (x, 0) == crtl->args.internal_arg_pointer 4324 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4331 || (GET_CODE (XEXP (x, 0)) == PLUS 4325 || (GET_CODE (XEXP (x, 0)) == PLUS
4332 && XEXP (XEXP (x, 0), 0) == 4326 && XEXP (XEXP (x, 0), 0) ==
4333 crtl->args.internal_arg_pointer 4327 crtl->args.internal_arg_pointer
4334 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)) 4328 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4335 { 4329 {
4336 if (XEXP (x, 0) != crtl->args.internal_arg_pointer) 4330 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4337 i = INTVAL (XEXP (XEXP (x, 0), 1)); 4331 i = INTVAL (XEXP (XEXP (x, 0), 1));
4338 4332
4339 /* expand_call should ensure this. */ 4333 /* expand_call should ensure this. */
4340 gcc_assert (!arg->locate.offset.var 4334 gcc_assert (!arg->locate.offset.var
4341 && arg->locate.size.var == 0 4335 && arg->locate.size.var == 0
4342 && GET_CODE (size_rtx) == CONST_INT); 4336 && CONST_INT_P (size_rtx));
4343 4337
4344 if (arg->locate.offset.constant > i) 4338 if (arg->locate.offset.constant > i)
4345 { 4339 {
4346 if (arg->locate.offset.constant < i + INTVAL (size_rtx)) 4340 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4347 sibcall_failure = 1; 4341 sibcall_failure = 1;