3
|
1
|
|
2 #define CbC_ARGS_SIZE 256
|
|
3
|
|
4 static void
|
|
5 preexpand_argument_expr (struct arg_data *, int);
|
|
6
|
|
7 static void
|
|
8 determine_order(int *, int);
|
|
9
|
|
10 static int
|
|
11 expand_one_arg_push (struct arg_data *, rtx, int, int, int);
|
|
12
|
|
13 static void
|
|
14 push_overlaps(struct arg_data *, int);
|
|
15
|
|
16 static int
|
|
17 check_frame_offset(rtx);
|
|
18
|
|
19
|
|
/* Expand a CbC (Continuation based C) "goto" into a sibling-call-style
   jump to a code segment.  The structure closely mirrors GCC's
   expand_call: many locals and whole statement sequences are copied
   from it, with the single-pass (pass == 0) sibcall path kept.

   EXP is the call expression.  TREE_OPERAND (exp, 2), when non-NULL,
   carries the goto-with-environment pair: TREE_VALUE is expanded to
   become the argument block base and TREE_PURPOSE is expanded and
   moved into the hard frame pointer before the indirect jump
   (NOTE(review): pair meaning inferred from the expansion below --
   confirm against the CbC front end).

   Returns TARGET (const0_rtx when the value is void or ignored).  */
static rtx
expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype,
                 tree addr,
                 int ignore,
                 int flags,
                 int num_actuals,
                 struct arg_data *args,
                 struct args_size *args_size,
                 CUMULATIVE_ARGS args_so_far,
                 rtx old_stack_level,
                 int reg_parm_stack_space,
                 int old_pending_adj,
                 unsigned HOST_WIDE_INT preferred_stack_boundary,
                 unsigned HOST_WIDE_INT preferred_unit_stack_boundary,
                 rtx structure_value_addr,
                 //int structure_value_addr_parm,
                 int old_inhibit_defer_pop
                 )
{
  /* The following variables are just copied from expand_call.  */
  int pass = 0;          /* always the sibcall pass here */
  int i;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
#endif
  rtx funexp;
  rtx valreg;
  struct args_size adjusted_args_size;
  int unadjusted_args_size;
  int reg_parm_seen;
  rtx static_chain_value;
  int old_stack_allocated;
  int old_stack_pointer_delta = 0;
  int old_stack_arg_under_construction = 0;
  rtx call_fusage;
  char *stack_usage_map_buf = NULL;
  rtx argblock = 0;
  HOST_WIDE_INT struct_value_size = 0;
  int pcc_struct_value = 0;
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  rtx tail_call_insns = NULL_RTX;

  /* Order in which the arguments are stored (filled by
     determine_order; currently just reverse order).  */
  int *store_order;

  int sibcall_failure = 0;
  /* We want to emit any pending stack adjustments before the tail
     recursion "call".  That way we know any adjustment after the tail
     recursion call can be ignored if we indeed use the tail
     call expansion.  */
  int save_pending_stack_adjust = 0;
  int save_stack_pointer_delta = 0;
  rtx insns;
  rtx before_call, next_arg_reg;

  /* For the goto-with-environment form.  */
  rtx env_rtx;
  tree env_tree;

  env_tree = TREE_OPERAND(exp, 2);

  /* State variables we need to save and restore between
     iterations.  */
  save_pending_stack_adjust = pending_stack_adjust;
  save_stack_pointer_delta = stack_pointer_delta;
  flags |= ECF_SIBCALL;

  /* Other state variables that we must reinitialize each time
     through the loop (that are not initialized by the loop itself).  */
  argblock = 0;
  call_fusage = 0;

  /* Start a new sequence for the normal call case.

     From this point on, if the sibling call fails, we want to set
     sibcall_failure instead of continuing the loop.  */
  start_sequence ();

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now if there is any chance
     this might be a call to alloca or if we are expanding a sibling
     call sequence or if we are calling a function that is to return
     with stack pointer depressed.
     Also do the adjustments before a throwing call, otherwise
     exception handling can fail; PR 19225.  */
  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0
          && (flags & ECF_MAY_BE_ALLOCA))
      || (pending_stack_adjust > 0
          && flag_exceptions && !(flags & ECF_NOTHROW))
      || pass == 0)
    do_pending_stack_adjust ();

  /* pass is always 0 here, so the stack-protector check runs before
     the sibcall-style jump, as expand_call does for real sibcalls.  */
  if (pass == 0 && crtl->stack_protect_guard)
    stack_protect_epilogue ();

  adjusted_args_size = *args_size;
  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  When generating a sibcall
     pattern, do not round up, since we'll be re-using whatever space our
     caller provided.  */
  unadjusted_args_size
    = compute_argument_block_size (reg_parm_stack_space,
                                   &adjusted_args_size,
                                   fndecl, fntype,
                                   (pass == 0 ? 0
                                    : preferred_stack_boundary));

  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

  /* The argument block when performing a sibling call is the
     incoming argument block.  */
  /*
  if ( 0 && !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
    {
      argblock = virtual_outgoing_args_rtx;
    }
  else
  */

  if ( env_tree!=NULL_TREE )
    {
      /*
       * ebp <= TREE_VALUE ( env_tree );
       * argsp <= TREE_PURPOSE ( env_tree );
       *
       */
      //rtx tmp;
      /* Compute the environment: argument block base and the frame
         pointer value to restore at the jump (see "store environment"
         below).  */
      push_temp_slots ();
      /* expand_expr shouldn't be here... it's moved later.  */
      //env_rtx = expand_expr ( env_tree, NULL_RTX,
      //VOIDmode, EXPAND_STACK_PARM);
      //argblock = hard_frame_pointer_rtx;
      //argblock = env_rtx;
      //argblock = plus_constant (argblock, -current_function_arg_offset_rtx);
      //tmp = gen_rtx_MINUS (Pmode, hard_frame_pointer_rtx, virtual_incoming_args_rtx);
      //argblock = gen_rtx_MINUS (Pmode, env_rtx, tmp);
      //argblock = plus_constant (argblock, frame_offset);
      argblock = expand_expr ( TREE_VALUE (env_tree), NULL_RTX,
                               VOIDmode, EXPAND_STACK_PARM);
      env_rtx = expand_expr ( TREE_PURPOSE (env_tree), NULL_RTX,
                              VOIDmode, EXPAND_STACK_PARM);
      preserve_temp_slots (argblock);
      pop_temp_slots ();
    }
  else
    {
      /* No explicit environment: reuse our own incoming argument
         area, skipping any pretend args.  */
      //argblock = virtual_incoming_args_rtx;
      argblock = crtl->args.internal_arg_pointer;

      argblock
#ifdef STACK_GROWS_DOWNWARD
        = plus_constant (argblock, crtl->args.pretend_args_size);
#else
        = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
    }

  /* Bitmap of incoming-arg-area bytes already overwritten; consulted
     indirectly by the push helpers.  */
  stored_args_map = sbitmap_alloc (args_size->constant);
  sbitmap_zero (stored_args_map);

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The save/restore code in store_one_arg handles all
         cases except one: a constructor call (including a C
         function returning a BLKmode struct) to initialize
         an argument.  */
      if (stack_arg_under_construction)
        {
          rtx push_size
            = GEN_INT (adjusted_args_size.constant
                       + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
                                                          : TREE_TYPE (fndecl))) ? 0
                          : reg_parm_stack_space));
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level,
                               NULL_RTX);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack
                 arg is being constructed at the old stack level.
                 Pushing the stack gets a clean outgoing argument
                 block.  */
              old_stack_arg_under_construction
                = stack_arg_under_construction;
              stack_arg_under_construction = 0;
              /* Make a new map for the new argument list.  */
              if (stack_usage_map_buf)
                free (stack_usage_map_buf);
              stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
              stack_usage_map = stack_usage_map_buf;
              memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
              highest_outgoing_arg_in_use = 0;
            }
          allocate_dynamic_stack_space (push_size, NULL_RTX,
                                        BITS_PER_UNIT);
        }

      /* If argument evaluation might modify the stack pointer,
         copy the address of the argument list to a register.  */
      for (i = 0; i < num_actuals; i++)
        if (args[i].pass_on_stack)
          {
            argblock = copy_addr_to_reg (argblock);
            break;
          }
    }

  compute_argument_addresses (args, argblock, num_actuals);

  /* In the case that a (normal) function gotos a code segment,
     adjust stack space.  */
  if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
  //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) )
    {
      HOST_WIDE_INT padding;
      padding = CbC_ARGS_SIZE -
        (crtl->args.size - crtl->args.pretend_args_size);
      /* NOTE(review): the 0&& makes this adjustment dead code --
         presumably disabled deliberately; confirm before removing.  */
      if (0&&padding > 0)
        anti_adjust_stack (GEN_INT (padding));
    }

  /* Now that the stack is properly aligned, pops can't safely
     be deferred during the evaluation of the arguments.  */
  NO_DEFER_POP;

  funexp = rtx_for_function_call (fndecl, addr);

  /* Figure out the register where the value, if any, will come back.
     A goto never returns a value, hence 0.  */
  valreg = 0;

  /* Precompute all register parameters.  It isn't safe to compute anything
     once we have started filling any specific hard regs.  */
  precompute_register_parameters (num_actuals, args, &reg_parm_seen);

  /* operand[2] is environment -- handled above, not as a static chain.  */
  //if (TREE_OPERAND (exp, 2))
  //static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
  //else
  static_chain_value = 0;

  /* Parallel assignment: arguments may overlap our own incoming
     argument slots, so ordering/temporaries matter.  */
  store_order = alloca (num_actuals * sizeof (int));
  memset (store_order, 0, num_actuals * sizeof (int));

  /* Fill the arg[i]->exprs.  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[i].reg == 0 || args[i].pass_on_stack)
        {
          preexpand_argument_expr (&args[i],
                                   adjusted_args_size.var != 0);
        }
    }

  /* Push overlapped arguments to temporaries first.  */
  push_overlaps(args, num_actuals);

  /* Determine ordering to store arguments,
     and generate RTL that stores some variables temporarily, if needed.  */
  /* now... this function does nothing beyond reversing the order.  */
  determine_order(store_order, num_actuals);

  /* Push arguments in that order.  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[store_order[i]].reg == 0
          || args[store_order[i]].pass_on_stack
          || args[store_order[i]].partial!=0 )
        {
          expand_one_arg_push (&args[store_order[i]], argblock, flags,
                               adjusted_args_size.var != 0,
                               reg_parm_stack_space);
        }
    }

  /* If register arguments require space on the stack and stack space
     was not preallocated, allocate stack space here for arguments
     passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
  //if (!ACCUMULATE_OUTGOING_ARGS
  //&& must_preallocate == 0 && reg_parm_stack_space > 0)
  //anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

  /* Load the callee address into a register if needed.  */
  funexp = prepare_call_address (funexp, static_chain_value,
                                 &call_fusage, reg_parm_seen, pass == 0);

  /* Store args into registers.  */
  load_register_parameters (args, num_actuals, &call_fusage, flags,
                            //pass == 0, &sibcall_failure);
                            0, NULL);

  /* Save a pointer to the last insn before the call, so that we can
     later safely search backwards to find the CALL_INSN.  */
  before_call = get_last_insn ();

  /* Set up next argument register.  For sibling calls on machines
     with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
  if (pass == 0)
    next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
                                          void_type_node, 1);
  else
#endif
    next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
                                 void_type_node, 1);

  /* All arguments and registers used for the call must be set up by
     now!  */

  /* Stack must be properly aligned now.  */
  gcc_assert (!pass
              || !(stack_pointer_delta % preferred_unit_stack_boundary));

  /* Store environment: clobber the frame, restore the saved frame
     pointer, and jump -- this effects the continuation transfer.  */
  if ( env_tree!=NULL )
    {
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));
      emit_move_insn (hard_frame_pointer_rtx, env_rtx);
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      //pop_temp_slots ();

      emit_indirect_jump (funexp);
    }

  /* Generate the actual call instruction.  */
  emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
               adjusted_args_size.constant, struct_value_size,
               //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
               next_arg_reg, valreg, 0, call_fusage,
               flags, & args_so_far);

  /* If a non-BLKmode value is returned at the most significant end
     of a register, shift the register right by the appropriate amount
     and update VALREG accordingly.  BLKmode values are handled by the
     group load/store machinery below.  */
  if (!structure_value_addr
      && !pcc_struct_value
      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
      && targetm.calls.return_in_msb (TREE_TYPE (exp)))
    {
      if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
        sibcall_failure = 1;
      valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
    }

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if ((flags & ECF_NORETURN) || pass == 0)
    {
      /* The barrier must be emitted
         immediately after the CALL_INSN.  Some ports emit more
         than just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);

      /* Stack adjustments after a noreturn call are dead code.
         However when NO_DEFER_POP is in effect, we must preserve
         stack_pointer_delta.  */
      if (inhibit_defer_pop == 0)
        {
          stack_pointer_delta = old_stack_allocated;
          pending_stack_adjust = 0;
        }
    }

  /* If value type not void, return an rtx for the value.  */

  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
      || ignore)
    target = const0_rtx;

  if (targetm.calls.promote_function_return(funtype))
    {
      /* If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
          && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
          && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
        {
          tree type = TREE_TYPE (exp);
          int unsignedp = TYPE_UNSIGNED (type);
          int offset = 0;
          enum machine_mode pmode;

          pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
          /* If we don't promote as expected, something is wrong.  */
          gcc_assert (GET_MODE (target) == pmode);

          if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
              && (GET_MODE_SIZE (GET_MODE (target))
                  > GET_MODE_SIZE (TYPE_MODE (type))))
            {
              offset = GET_MODE_SIZE (GET_MODE (target))
                - GET_MODE_SIZE (TYPE_MODE (type));
              if (! BYTES_BIG_ENDIAN)
                offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
              else if (! WORDS_BIG_ENDIAN)
                offset %= UNITS_PER_WORD;
            }
          target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
          SUBREG_PROMOTED_VAR_P (target) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
        }
    }

  /* If size of args is variable or this was a constructor call for a stack
     argument, restore saved stack-pointer value.  */

  if (old_stack_level)
    {
      emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
      stack_pointer_delta = old_stack_pointer_delta;
      pending_stack_adjust = old_pending_adj;
      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
      stack_arg_under_construction = old_stack_arg_under_construction;
      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  /* If this was alloca, record the new stack level for nonlocal gotos.
     Check for the handler slots since we might not have a save area
     for non-local gotos.  */

  if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  /* Free up storage we no longer need.  */
  for (i = 0; i < num_actuals; ++i)
    if (args[i].aligned_regs)
      free (args[i].aligned_regs);

  insns = get_insns ();
  end_sequence ();

  tail_call_insns = insns;

  /* Restore the pending stack adjustment now that we have
     finished generating the sibling call sequence.  */

  pending_stack_adjust = save_pending_stack_adjust;
  stack_pointer_delta = save_stack_pointer_delta;

  /* Prepare arg structure for next iteration.  */
  for (i = 0; i < num_actuals; i++)
    {
      args[i].value = 0;
      args[i].aligned_regs = 0;
      args[i].stack = 0;
    }

  sbitmap_free (stored_args_map);

  /* Unlike expand_call's sibcall path, emit the sequence directly.  */
  emit_insn(tail_call_insns);
  crtl->tail_call_emit = true;

  return target;
}
|
|
512
|
|
513
|
|
/* Expand ARG's tree expression into an rtx without pushing it yet.
   The generated insns are captured into arg->exprs (via a private
   sequence) and then emitted immediately; arg->value holds the
   result, mode-converted if promotion changed the mode.  The actual
   store to the argument slot happens later in expand_one_arg_push.
   VARIABLE_SIZE is currently unused.  */
static void
preexpand_argument_expr (struct arg_data *arg,
                         int variable_size ATTRIBUTE_UNUSED)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* Start a new sequence so the expansion insns can be kept in
     arg->exprs.  */
  start_sequence ();

  /* Track BLKmode construction on the stack while expanding.  */
  if (arg->pass_on_stack)
    stack_arg_under_construction++;

  /* Expand directly into the stack slot only when no partial-reg or
     mode conversion is involved.  */
  arg->value = expand_expr (pval,
                            (partial
                             || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                            ? NULL_RTX : arg->stack,
                            VOIDmode, EXPAND_STACK_PARM);

  /* If we are promoting object (or for any other reason) the mode
     doesn't agree, convert the mode.  */

  if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
    arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                arg->value, arg->unsignedp);

  if (arg->pass_on_stack)
    stack_arg_under_construction--;

  arg->exprs = get_insns ();
  end_sequence ();

  /* Emit the captured expansion now; arg->exprs keeps the handle.  */
  if (arg->exprs) emit_insn(arg->exprs);

  preserve_temp_slots (arg->value);
  pop_temp_slots ();

  return ;
}
|
|
583
|
|
/* Push one argument (ARG) into its slot in ARGBLOCK.  Adapted from
   GCC's store_one_arg; handles the scalar (non-BLKmode) and BLKmode
   cases via emit_push_insn, and loads PARALLEL-classified args into
   temporaries.  Always returns 0 (the sibcall_failure-style result of
   store_one_arg is not used here).  VARIABLE_SIZE is unused.  */
static int
expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags,
                     int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  int used = 0;
  /* NOTE(review): lower_bound/upper_bound are never updated, so the
     stack_usage_map marking loop below is a no-op -- the save/restore
     logic of store_one_arg that computed them was dropped.  Confirm
     this is intentional for the CbC sibcall-only path.  */
  int i, lower_bound = 0, upper_bound = 0;
  rtx reg = 0;
  int partial = 0;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  /* Copied from store_one_arg; modify here after.  */
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }
  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.
             NOTE(review): unlike expand_call, no overlap check is
             actually performed here -- only the asserts remain; the
             overlap handling was moved to push_overlaps.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     crtl->args.internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && GET_CODE (size_rtx) == CONST_INT);
            }
        }

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  (Currently a no-op: see the
     lower_bound/upper_bound note at the top.)  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return 0;
}
|
|
778
|
|
/* Decide the order in which the NUM_ACTUALS arguments are stored.
   Currently this is simply reverse order: ORDER[0] gets the index of
   the last argument, ORDER[num_actuals-1] gets 0.  */
static void
determine_order (int *order, int num_actuals)
{
  int slot = 0;
  int value;

  /* Fill the slots with indices counting down from the last one.  */
  for (value = num_actuals - 1; value >= 0; value--)
    order[slot++] = value;
}
|
|
787
|
|
/* For each argument whose source value AND destination slot both live
   in the incoming argument area (and could therefore be clobbered by
   an earlier store during the parallel assignment), copy the value
   out into a fresh temporary first and make arg->value point at it.  */
static void
push_overlaps(struct arg_data *args, int num_actuals)
{
  int i;

  for (i=0; i<num_actuals; i++)
    {
      int dst_offset; /* offset of the destination slot in the arg area, or -1 */
      int src_offset; /* offset of the current value in the arg area, or -1 */
      rtx temp;
      /* Skip args whose slot or value is not in the incoming arg area.  */
      if ( (dst_offset=check_frame_offset(args[i].stack)) < 0 ) continue;
      if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue;

      /* Save the value aside into a temporary before it can be
         overwritten.  (Original comment: "退避" = evacuate/save.)  */
      temp = assign_temp(args[i].tree_value, 1, 0, 0);
      if ( args[i].mode==BLKmode )
        emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 );
      else
        emit_move_insn ( temp, args[i].value );
      args[i].value = temp;

    }
  return;
}
|
|
812
|
|
813 static int
|
|
814 check_frame_offset(rtx x)
|
|
815 {
|
|
816 int i;
|
|
817 rtx addr;
|
|
818 if ( !x || !MEM_P(x))
|
|
819 return -1;
|
|
820
|
|
821 addr = XEXP(x, 0);
|
|
822 if (addr == crtl->args.internal_arg_pointer)
|
|
823 i = 0;
|
|
824 else if (GET_CODE (addr) == PLUS
|
|
825 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
|
|
826 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
|
|
827 i = INTVAL (XEXP (addr, 1));
|
|
828 else if (GET_CODE (addr) == PLUS
|
|
829 && GET_CODE (XEXP (addr, 0)) == CONST_INT
|
|
830 && XEXP (addr, 1) == crtl->args.internal_arg_pointer )
|
|
831 i = INTVAL (XEXP (addr, 0));
|
|
832 else
|
|
833 return -1;
|
|
834
|
|
835 return i;
|
|
836 }
|
|
837
|