Mercurial repository: CbC > CbC_gcc — annotate view of gcc/cbc-goto.h @ revision 7:7ff9aed93de8
commit message: partially written goto with env...
author:   Shinji KONO <kono@ie.u-ryukyu.ac.jp>
date:     Thu, 20 Aug 2009 19:09:17 +0900
parents:  983001f85155
children: 959d4c8c8abc
3 | 1 |
2 #define CbC_ARGS_SIZE 256 | |
3 | |
4 static void | |
5 preexpand_argument_expr (struct arg_data *, int); | |
6 | |
7 static void | |
8 determine_order(int *, int); | |
9 | |
10 static int | |
11 expand_one_arg_push (struct arg_data *, rtx, int, int, int); | |
12 | |
13 static void | |
14 push_overlaps(struct arg_data *, int); | |
15 | |
16 static int | |
17 check_frame_offset(rtx); | |
18 | |
19 | |
20 static rtx | |
21 expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype, | |
22 tree addr, | |
23 int ignore, | |
24 int flags, | |
25 int num_actuals, | |
26 struct arg_data *args, | |
27 struct args_size *args_size, | |
28 CUMULATIVE_ARGS args_so_far, | |
29 rtx old_stack_level, | |
30 int reg_parm_stack_space, | |
31 int old_pending_adj, | |
32 unsigned HOST_WIDE_INT preferred_stack_boundary, | |
33 unsigned HOST_WIDE_INT preferred_unit_stack_boundary, | |
34 rtx structure_value_addr, | |
35 //int structure_value_addr_parm, | |
36 int old_inhibit_defer_pop | |
37 ) | |
38 { | |
39 | |
40 /* folowing variables is just copied from expand_call. */ | |
41 | |
42 int pass = 0; | |
43 int i; | |
44 #ifdef REG_PARM_STACK_SPACE | |
45 /* Define the boundary of the register parm stack space that needs to be | |
46 saved, if any. */ | |
47 #endif | |
48 rtx funexp; | |
49 rtx valreg; | |
50 struct args_size adjusted_args_size; | |
51 int unadjusted_args_size; | |
52 int reg_parm_seen; | |
53 rtx static_chain_value; | |
54 int old_stack_allocated; | |
55 int old_stack_pointer_delta = 0; | |
56 int old_stack_arg_under_construction = 0; | |
57 rtx call_fusage; | |
58 char *stack_usage_map_buf = NULL; | |
59 rtx argblock = 0; | |
60 HOST_WIDE_INT struct_value_size = 0; | |
61 int pcc_struct_value = 0; | |
62 int initial_highest_arg_in_use = highest_outgoing_arg_in_use; | |
63 char *initial_stack_usage_map = stack_usage_map; | |
64 rtx tail_call_insns = NULL_RTX; | |
65 | |
66 | |
67 int *store_order; | |
68 | |
69 | |
70 int sibcall_failure = 0; | |
71 /* We want to emit any pending stack adjustments before the tail | |
72 recursion "call". That way we know any adjustment after the tail | |
73 recursion call can be ignored if we indeed use the tail | |
74 call expansion. */ | |
75 int save_pending_stack_adjust = 0; | |
76 int save_stack_pointer_delta = 0; | |
77 rtx insns; | |
78 rtx before_call, next_arg_reg; | |
79 | |
80 /* State variables we need to save and restore between | |
81 iterations. */ | |
82 save_pending_stack_adjust = pending_stack_adjust; | |
83 save_stack_pointer_delta = stack_pointer_delta; | |
84 flags |= ECF_SIBCALL; | |
85 | |
86 /* Other state variables that we must reinitialize each time | |
87 through the loop (that are not initialized by the loop itself). */ | |
88 argblock = 0; | |
89 call_fusage = 0; | |
90 | |
91 /* Start a new sequence for the normal call case. | |
92 | |
93 From this point on, if the sibling call fails, we want to set | |
94 sibcall_failure instead of continuing the loop. */ | |
95 start_sequence (); | |
96 | |
97 /* Don't let pending stack adjusts add up to too much. | |
98 Also, do all pending adjustments now if there is any chance | |
99 this might be a call to alloca or if we are expanding a sibling | |
100 call sequence or if we are calling a function that is to return | |
101 with stack pointer depressed. | |
102 Also do the adjustments before a throwing call, otherwise | |
103 exception handling can fail; PR 19225. */ | |
104 if (pending_stack_adjust >= 32 | |
105 || (pending_stack_adjust > 0 | |
106 && (flags & ECF_MAY_BE_ALLOCA)) | |
107 || (pending_stack_adjust > 0 | |
108 && flag_exceptions && !(flags & ECF_NOTHROW)) | |
109 || pass == 0) | |
110 do_pending_stack_adjust (); | |
111 | |
112 | |
113 if (pass == 0 && crtl->stack_protect_guard) | |
114 stack_protect_epilogue (); | |
115 | |
116 adjusted_args_size = *args_size; | |
117 /* Compute the actual size of the argument block required. The variable | |
118 and constant sizes must be combined, the size may have to be rounded, | |
119 and there may be a minimum required size. When generating a sibcall | |
120 pattern, do not round up, since we'll be re-using whatever space our | |
121 caller provided. */ | |
122 unadjusted_args_size | |
123 = compute_argument_block_size (reg_parm_stack_space, | |
124 &adjusted_args_size, | |
125 fndecl, fntype, | |
126 (pass == 0 ? 0 | |
127 : preferred_stack_boundary)); | |
128 | |
129 old_stack_allocated = stack_pointer_delta - pending_stack_adjust; | |
130 | |
131 argblock = crtl->args.internal_arg_pointer; | |
132 | |
133 argblock | |
134 #ifdef STACK_GROWS_DOWNWARD | |
135 = plus_constant (argblock, crtl->args.pretend_args_size); | |
136 #else | |
137 = plus_constant (argblock, -crtl->args.pretend_args_size); | |
138 #endif | |
139 | |
140 | |
141 stored_args_map = sbitmap_alloc (args_size->constant); | |
142 sbitmap_zero (stored_args_map); | |
143 | |
144 | |
145 if (ACCUMULATE_OUTGOING_ARGS) | |
146 { | |
147 /* The save/restore code in store_one_arg handles all | |
148 cases except one: a constructor call (including a C | |
149 function returning a BLKmode struct) to initialize | |
150 an argument. */ | |
151 if (stack_arg_under_construction) | |
152 { | |
153 rtx push_size | |
154 = GEN_INT (adjusted_args_size.constant | |
155 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype | |
156 : TREE_TYPE (fndecl))) ? 0 | |
157 : reg_parm_stack_space)); | |
158 if (old_stack_level == 0) | |
159 { | |
160 emit_stack_save (SAVE_BLOCK, &old_stack_level, | |
161 NULL_RTX); | |
162 old_stack_pointer_delta = stack_pointer_delta; | |
163 old_pending_adj = pending_stack_adjust; | |
164 pending_stack_adjust = 0; | |
165 /* stack_arg_under_construction says whether a stack | |
166 arg is being constructed at the old stack level. | |
167 Pushing the stack gets a clean outgoing argument | |
168 block. */ | |
169 old_stack_arg_under_construction | |
170 = stack_arg_under_construction; | |
171 stack_arg_under_construction = 0; | |
172 /* Make a new map for the new argument list. */ | |
173 if (stack_usage_map_buf) | |
174 free (stack_usage_map_buf); | |
175 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); | |
176 stack_usage_map = stack_usage_map_buf; | |
177 memset (stack_usage_map, 0, highest_outgoing_arg_in_use); | |
178 highest_outgoing_arg_in_use = 0; | |
179 } | |
180 allocate_dynamic_stack_space (push_size, NULL_RTX, | |
181 BITS_PER_UNIT); | |
182 } | |
183 | |
184 /* If argument evaluation might modify the stack pointer, | |
185 copy the address of the argument list to a register. */ | |
186 for (i = 0; i < num_actuals; i++) | |
187 if (args[i].pass_on_stack) | |
188 { | |
189 argblock = copy_addr_to_reg (argblock); | |
190 break; | |
191 } | |
192 } | |
193 | |
194 compute_argument_addresses (args, argblock, num_actuals); | |
195 | |
196 /* in the case that | |
197 a function goto codesegment. | |
198 adjust stack space. */ | |
199 if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) ) | |
200 //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) ) | |
201 { | |
202 HOST_WIDE_INT padding; | |
203 padding = CbC_ARGS_SIZE - | |
204 (crtl->args.size - crtl->args.pretend_args_size); | |
205 if (0&&padding > 0) | |
206 anti_adjust_stack (GEN_INT (padding)); | |
207 } | |
208 | |
209 /* Now that the stack is properly aligned, pops can't safely | |
210 be deferred during the evaluation of the arguments. */ | |
211 NO_DEFER_POP; | |
212 | |
213 funexp = rtx_for_function_call (fndecl, addr); | |
214 | |
215 /* Figure out the register where the value, if any, will come back. */ | |
216 valreg = 0; | |
217 | |
218 | |
219 /* Precompute all register parameters. It isn't safe to compute anything | |
220 once we have started filling any specific hard regs. */ | |
221 precompute_register_parameters (num_actuals, args, ®_parm_seen); | |
222 | |
223 /* operand[2] is environment. */ | |
7
7ff9aed93de8
partially written goto with env...
Shinji KONO <kono@ie.u-ryukyu.ac.jp>
parents:
6
diff
changeset
|
224 if (TREE_OPERAND (exp, 2)) |
7ff9aed93de8
partially written goto with env...
Shinji KONO <kono@ie.u-ryukyu.ac.jp>
parents:
6
diff
changeset
|
225 static_chain_value = expand_normal (TREE_OPERAND (exp, 2)); |
7ff9aed93de8
partially written goto with env...
Shinji KONO <kono@ie.u-ryukyu.ac.jp>
parents:
6
diff
changeset
|
226 else |
3 | 227 static_chain_value = 0; |
228 | |
229 | |
230 /* parallel assignment */ | |
231 store_order = alloca (num_actuals * sizeof (int)); | |
232 memset (store_order, 0, num_actuals * sizeof (int)); | |
233 | |
234 /* fill the arg[i]->exprs. */ | |
235 for (i = 0; i < num_actuals; i++) | |
236 { | |
237 if (args[i].reg == 0 || args[i].pass_on_stack) | |
238 { | |
239 preexpand_argument_expr (&args[i], | |
240 adjusted_args_size.var != 0); | |
241 } | |
242 } | |
243 | |
244 | |
245 /* push overlapped argument to stack. */ | |
246 push_overlaps(args, num_actuals); | |
247 | |
248 /* determine ordering to store arguments. | |
249 and generate RTL that store some variable temporary, if it needed.*/ | |
250 /* now... this function do nothing. */ | |
251 determine_order(store_order, num_actuals); | |
252 | |
253 /* push arguments in the order . */ | |
254 for (i = 0; i < num_actuals; i++) | |
255 { | |
256 if (args[store_order[i]].reg == 0 | |
257 || args[store_order[i]].pass_on_stack | |
258 || args[store_order[i]].partial!=0 ) | |
259 { | |
260 expand_one_arg_push (&args[store_order[i]], argblock, flags, | |
261 adjusted_args_size.var != 0, | |
262 reg_parm_stack_space); | |
263 } | |
264 } | |
265 | |
266 | |
267 /* If register arguments require space on the stack and stack space | |
268 was not preallocated, allocate stack space here for arguments | |
269 passed in registers. */ | |
270 #ifdef OUTGOING_REG_PARM_STACK_SPACE | |
271 //if (!ACCUMULATE_OUTGOING_ARGS | |
272 //&& must_preallocate == 0 && reg_parm_stack_space > 0) | |
273 //anti_adjust_stack (GEN_INT (reg_parm_stack_space)); | |
274 #endif | |
275 | |
276 /* */ | |
277 funexp = prepare_call_address (funexp, static_chain_value, | |
278 &call_fusage, reg_parm_seen, pass == 0); | |
279 | |
280 /* store args into register. */ | |
281 load_register_parameters (args, num_actuals, &call_fusage, flags, | |
282 //pass == 0, &sibcall_failure); | |
283 0, NULL); | |
284 | |
285 /* Save a pointer to the last insn before the call, so that we can | |
286 later safely search backwards to find the CALL_INSN. */ | |
287 before_call = get_last_insn (); | |
288 | |
289 /* Set up next argument register. For sibling calls on machines | |
290 with register windows this should be the incoming register. */ | |
291 #ifdef FUNCTION_INCOMING_ARG | |
292 if (pass == 0) | |
293 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode, | |
294 void_type_node, 1); | |
295 else | |
296 #endif | |
297 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, | |
298 void_type_node, 1); | |
299 | |
300 /* All arguments and registers used for the call must be set up by | |
301 now! */ | |
302 | |
303 /* Stack must be properly aligned now. */ | |
304 gcc_assert (!pass | |
305 || !(stack_pointer_delta % preferred_unit_stack_boundary)); | |
7
7ff9aed93de8
partially written goto with env...
Shinji KONO <kono@ie.u-ryukyu.ac.jp>
parents:
6
diff
changeset
|
306 #if 0 |
3 | 307 /* store environment. */ |
308 if ( env_tree!=NULL ) | |
309 { | |
310 emit_insn (gen_rtx_CLOBBER (VOIDmode, | |
311 gen_rtx_MEM (BLKmode, | |
312 hard_frame_pointer_rtx))); | |
313 emit_move_insn (hard_frame_pointer_rtx, env_rtx); | |
314 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); | |
315 //pop_temp_slots (); | |
316 | |
317 emit_indirect_jump (funexp); | |
318 } | |
7
7ff9aed93de8
partially written goto with env...
Shinji KONO <kono@ie.u-ryukyu.ac.jp>
parents:
6
diff
changeset
|
319 #endif |
3 | 320 |
321 /* Generate the actual call instruction. */ | |
322 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, | |
323 adjusted_args_size.constant, struct_value_size, | |
324 //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, | |
325 next_arg_reg, valreg, 0, call_fusage, | |
326 flags, & args_so_far); | |
327 | |
328 /* If a non-BLKmode value is returned at the most significant end | |
329 of a register, shift the register right by the appropriate amount | |
330 and update VALREG accordingly. BLKmode values are handled by the | |
331 group load/store machinery below. */ | |
332 if (!structure_value_addr | |
333 && !pcc_struct_value | |
334 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode | |
335 && targetm.calls.return_in_msb (TREE_TYPE (exp))) | |
336 { | |
337 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg)) | |
338 sibcall_failure = 1; | |
339 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg)); | |
340 } | |
341 | |
342 | |
343 /* For calls to `setjmp', etc., inform flow.c it should complain | |
344 if nonvolatile values are live. For functions that cannot return, | |
345 inform flow that control does not fall through. */ | |
346 | |
347 if ((flags & ECF_NORETURN) || pass == 0) | |
348 { | |
349 /* The barrier must be emitted | |
350 immediately after the CALL_INSN. Some ports emit more | |
351 than just a CALL_INSN above, so we must search for it here. */ | |
352 | |
353 rtx last = get_last_insn (); | |
354 while (!CALL_P (last)) | |
355 { | |
356 last = PREV_INSN (last); | |
357 /* There was no CALL_INSN? */ | |
358 gcc_assert (last != before_call); | |
359 } | |
360 | |
361 emit_barrier_after (last); | |
362 | |
363 /* Stack adjustments after a noreturn call are dead code. | |
364 However when NO_DEFER_POP is in effect, we must preserve | |
365 stack_pointer_delta. */ | |
366 if (inhibit_defer_pop == 0) | |
367 { | |
368 stack_pointer_delta = old_stack_allocated; | |
369 pending_stack_adjust = 0; | |
370 } | |
371 } | |
372 | |
373 /* If value type not void, return an rtx for the value. */ | |
374 | |
375 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode | |
376 || ignore) | |
377 target = const0_rtx; | |
378 | |
379 if (targetm.calls.promote_function_return(funtype)) | |
380 { | |
381 /* If we promoted this return value, make the proper SUBREG. | |
382 TARGET might be const0_rtx here, so be careful. */ | |
383 if (REG_P (target) | |
384 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode | |
385 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) | |
386 { | |
387 tree type = TREE_TYPE (exp); | |
388 int unsignedp = TYPE_UNSIGNED (type); | |
389 int offset = 0; | |
390 enum machine_mode pmode; | |
391 | |
392 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1); | |
393 /* If we don't promote as expected, something is wrong. */ | |
394 gcc_assert (GET_MODE (target) == pmode); | |
395 | |
396 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) | |
397 && (GET_MODE_SIZE (GET_MODE (target)) | |
398 > GET_MODE_SIZE (TYPE_MODE (type)))) | |
399 { | |
400 offset = GET_MODE_SIZE (GET_MODE (target)) | |
401 - GET_MODE_SIZE (TYPE_MODE (type)); | |
402 if (! BYTES_BIG_ENDIAN) | |
403 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; | |
404 else if (! WORDS_BIG_ENDIAN) | |
405 offset %= UNITS_PER_WORD; | |
406 } | |
407 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); | |
408 SUBREG_PROMOTED_VAR_P (target) = 1; | |
409 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); | |
410 } | |
411 } | |
412 | |
413 /* If size of args is variable or this was a constructor call for a stack | |
414 argument, restore saved stack-pointer value. */ | |
415 | |
416 if (old_stack_level) | |
417 { | |
418 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); | |
419 stack_pointer_delta = old_stack_pointer_delta; | |
420 pending_stack_adjust = old_pending_adj; | |
421 old_stack_allocated = stack_pointer_delta - pending_stack_adjust; | |
422 stack_arg_under_construction = old_stack_arg_under_construction; | |
423 highest_outgoing_arg_in_use = initial_highest_arg_in_use; | |
424 stack_usage_map = initial_stack_usage_map; | |
425 } | |
426 | |
427 /* If this was alloca, record the new stack level for nonlocal gotos. | |
428 Check for the handler slots since we might not have a save area | |
429 for non-local gotos. */ | |
430 | |
431 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0) | |
432 update_nonlocal_goto_save_area (); | |
433 | |
434 /* Free up storage we no longer need. */ | |
435 for (i = 0; i < num_actuals; ++i) | |
436 if (args[i].aligned_regs) | |
437 free (args[i].aligned_regs); | |
438 | |
439 insns = get_insns (); | |
440 end_sequence (); | |
441 | |
442 tail_call_insns = insns; | |
443 | |
444 /* Restore the pending stack adjustment now that we have | |
445 finished generating the sibling call sequence. */ | |
446 | |
447 pending_stack_adjust = save_pending_stack_adjust; | |
448 stack_pointer_delta = save_stack_pointer_delta; | |
449 | |
450 /* Prepare arg structure for next iteration. */ | |
451 for (i = 0; i < num_actuals; i++) | |
452 { | |
453 args[i].value = 0; | |
454 args[i].aligned_regs = 0; | |
455 args[i].stack = 0; | |
456 } | |
457 | |
458 sbitmap_free (stored_args_map); | |
459 | |
460 | |
461 emit_insn(tail_call_insns); | |
462 crtl->tail_call_emit = true; | |
463 | |
464 return target; | |
465 } | |
466 | |
467 | |
468 static void | |
469 preexpand_argument_expr (struct arg_data *arg, | |
470 int variable_size ATTRIBUTE_UNUSED) | |
471 { | |
472 tree pval = arg->tree_value; | |
473 rtx reg = 0; | |
474 int partial = 0; | |
475 | |
476 if (TREE_CODE (pval) == ERROR_MARK) | |
477 return; | |
478 | |
479 /* Push a new temporary level for any temporaries we make for | |
480 this argument. */ | |
481 push_temp_slots (); | |
482 | |
483 | |
484 /* If this isn't going to be placed on both the stack and in registers, | |
485 set up the register and number of words. */ | |
486 if (! arg->pass_on_stack) | |
487 { | |
488 //if (flags & ECF_SIBCALL) | |
489 reg = arg->tail_call_reg; | |
490 //else | |
491 //reg = arg->reg; | |
492 partial = arg->partial; | |
493 } | |
494 | |
495 /* Being passed entirely in a register. We shouldn't be called in | |
496 this case. */ | |
497 gcc_assert (reg == 0 || partial != 0); | |
498 | |
499 /* If this arg needs special alignment, don't load the registers | |
500 here. */ | |
501 if (arg->n_aligned_regs != 0) | |
502 reg = 0; | |
503 | |
504 /* Start a new sequence for the arg->exprs. */ | |
505 start_sequence (); | |
506 | |
507 | |
508 if (arg->pass_on_stack) | |
509 stack_arg_under_construction++; | |
510 | |
511 arg->value = expand_expr (pval, | |
512 (partial | |
513 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) | |
514 ? NULL_RTX : arg->stack, | |
515 VOIDmode, EXPAND_STACK_PARM); | |
516 | |
517 /* If we are promoting object (or for any other reason) the mode | |
518 doesn't agree, convert the mode. */ | |
519 | |
520 if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) | |
521 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), | |
522 arg->value, arg->unsignedp); | |
523 | |
524 if (arg->pass_on_stack) | |
525 stack_arg_under_construction--; | |
526 | |
527 arg->exprs = get_insns (); | |
528 end_sequence (); | |
529 | |
530 if (arg->exprs) emit_insn(arg->exprs); | |
531 | |
532 preserve_temp_slots (arg->value); | |
533 pop_temp_slots (); | |
534 | |
535 return ; | |
536 } | |
537 | |
538 static int | |
539 expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags, | |
540 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) | |
541 { | |
542 tree pval = arg->tree_value; | |
543 int used = 0; | |
544 int i, lower_bound = 0, upper_bound = 0; | |
545 rtx reg = 0; | |
546 int partial = 0; | |
547 | |
548 /* Push a new temporary level for any temporaries we make for | |
549 this argument. */ | |
550 push_temp_slots (); | |
551 | |
552 | |
553 /* copy from store_one_arg. modify here after.*/ | |
554 /* If this isn't going to be placed on both the stack and in registers, | |
555 set up the register and number of words. */ | |
556 if (! arg->pass_on_stack) | |
557 { | |
558 //if (flags & ECF_SIBCALL) | |
559 reg = arg->tail_call_reg; | |
560 //else | |
561 //reg = arg->reg; | |
562 partial = arg->partial; | |
563 } | |
564 /* Being passed entirely in a register. We shouldn't be called in | |
565 this case. */ | |
566 gcc_assert (reg == 0 || partial != 0); | |
567 /* If this arg needs special alignment, don't load the registers | |
568 here. */ | |
569 if (arg->n_aligned_regs != 0) | |
570 reg = 0; | |
571 | |
572 | |
573 | |
574 | |
575 if (arg->value == arg->stack) | |
576 /* If the value is already in the stack slot, we are done. */ | |
577 ; | |
578 else if (arg->mode != BLKmode) | |
579 { | |
580 int size; | |
581 | |
582 /* Argument is a scalar, not entirely passed in registers. | |
583 (If part is passed in registers, arg->partial says how much | |
584 and emit_push_insn will take care of putting it there.) | |
585 | |
586 Push it, and if its size is less than the | |
587 amount of space allocated to it, | |
588 also bump stack pointer by the additional space. | |
589 Note that in C the default argument promotions | |
590 will prevent such mismatches. */ | |
591 | |
592 size = GET_MODE_SIZE (arg->mode); | |
593 /* Compute how much space the push instruction will push. | |
594 On many machines, pushing a byte will advance the stack | |
595 pointer by a halfword. */ | |
596 #ifdef PUSH_ROUNDING | |
597 size = PUSH_ROUNDING (size); | |
598 #endif | |
599 used = size; | |
600 | |
601 /* Compute how much space the argument should get: | |
602 round up to a multiple of the alignment for arguments. */ | |
603 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval))) | |
604 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) | |
605 / (PARM_BOUNDARY / BITS_PER_UNIT)) | |
606 * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
607 | |
608 /* This isn't already where we want it on the stack, so put it there. | |
609 This can either be done with push or copy insns. */ | |
610 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, | |
611 PARM_BOUNDARY, partial, reg, used - size, argblock, | |
612 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, | |
613 ARGS_SIZE_RTX (arg->locate.alignment_pad)); | |
614 | |
615 /* Unless this is a partially-in-register argument, the argument is now | |
616 in the stack. */ | |
617 if (partial == 0) | |
618 arg->value = arg->stack; | |
619 } | |
620 else | |
621 { | |
622 /* BLKmode, at least partly to be pushed. */ | |
623 | |
624 unsigned int parm_align; | |
625 int excess; | |
626 rtx size_rtx; | |
627 | |
628 /* Pushing a nonscalar. | |
629 If part is passed in registers, PARTIAL says how much | |
630 and emit_push_insn will take care of putting it there. */ | |
631 | |
632 /* Round its size up to a multiple | |
633 of the allocation unit for arguments. */ | |
634 | |
635 if (arg->locate.size.var != 0) | |
636 { | |
637 excess = 0; | |
638 size_rtx = ARGS_SIZE_RTX (arg->locate.size); | |
639 } | |
640 else | |
641 { | |
642 /* PUSH_ROUNDING has no effect on us, because emit_push_insn | |
643 for BLKmode is careful to avoid it. */ | |
644 excess = (arg->locate.size.constant | |
645 - int_size_in_bytes (TREE_TYPE (pval)) | |
646 + partial); | |
647 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), | |
648 NULL_RTX, TYPE_MODE (sizetype), 0); | |
649 } | |
650 | |
651 parm_align = arg->locate.boundary; | |
652 | |
653 /* When an argument is padded down, the block is aligned to | |
654 PARM_BOUNDARY, but the actual argument isn't. */ | |
655 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) | |
656 { | |
657 if (arg->locate.size.var) | |
658 parm_align = BITS_PER_UNIT; | |
659 else if (excess) | |
660 { | |
661 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; | |
662 parm_align = MIN (parm_align, excess_align); | |
663 } | |
664 } | |
665 | |
666 if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) | |
667 { | |
668 /* emit_push_insn might not work properly if arg->value and | |
669 argblock + arg->locate.offset areas overlap. */ | |
670 rtx x = arg->value; | |
671 int i = 0; | |
672 | |
673 if (XEXP (x, 0) == crtl->args.internal_arg_pointer | |
674 || (GET_CODE (XEXP (x, 0)) == PLUS | |
675 && XEXP (XEXP (x, 0), 0) == | |
676 crtl->args.internal_arg_pointer | |
677 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)) | |
678 { | |
679 if (XEXP (x, 0) != crtl->args.internal_arg_pointer) | |
680 i = INTVAL (XEXP (XEXP (x, 0), 1)); | |
681 | |
682 /* expand_call should ensure this. */ | |
683 gcc_assert (!arg->locate.offset.var | |
684 && GET_CODE (size_rtx) == CONST_INT); | |
685 } | |
686 } | |
687 | |
688 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, | |
689 parm_align, partial, reg, excess, argblock, | |
690 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, | |
691 ARGS_SIZE_RTX (arg->locate.alignment_pad)); | |
692 | |
693 /* Unless this is a partially-in-register argument, the argument is now | |
694 in the stack. | |
695 | |
696 ??? Unlike the case above, in which we want the actual | |
697 address of the data, so that we can load it directly into a | |
698 register, here we want the address of the stack slot, so that | |
699 it's properly aligned for word-by-word copying or something | |
700 like that. It's not clear that this is always correct. */ | |
701 if (partial == 0) | |
702 arg->value = arg->stack_slot; | |
703 } | |
704 | |
705 if (arg->reg && GET_CODE (arg->reg) == PARALLEL) | |
706 { | |
707 tree type = TREE_TYPE (arg->tree_value); | |
708 arg->parallel_value | |
709 = emit_group_load_into_temps (arg->reg, arg->value, type, | |
710 int_size_in_bytes (type)); | |
711 } | |
712 | |
713 /* Mark all slots this store used. */ | |
714 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL) | |
715 && argblock && ! variable_size && arg->stack) | |
716 for (i = lower_bound; i < upper_bound; i++) | |
717 stack_usage_map[i] = 1; | |
718 | |
719 /* Once we have pushed something, pops can't safely | |
720 be deferred during the rest of the arguments. */ | |
721 NO_DEFER_POP; | |
722 | |
723 /* Free any temporary slots made in processing this argument. Show | |
724 that we might have taken the address of something and pushed that | |
725 as an operand. */ | |
726 preserve_temp_slots (NULL_RTX); | |
727 free_temp_slots (); | |
728 pop_temp_slots (); | |
729 | |
730 return 0; | |
731 } | |
732 | |
/* Fill ORDER[0..NUM_ACTUALS-1] with the order in which arguments are
   stored: currently simply the reverse order (last argument first),
   i.e. ORDER[i] = NUM_ACTUALS - i - 1.  A smarter dependency-aware
   ordering could be plugged in here later.  */
static void
determine_order (int *order, int num_actuals)
{
  int i;
  for (i = 0; i < num_actuals; i++)
    order[i] = num_actuals - i - 1;
  return;
}
741 | |
742 static void | |
743 push_overlaps(struct arg_data *args, int num_actuals) | |
744 { | |
745 int i; | |
746 | |
747 for (i=0; i<num_actuals; i++) | |
748 { | |
749 int dst_offset; /* */ | |
750 int src_offset; /* */ | |
751 rtx temp; | |
752 if ( (dst_offset=check_frame_offset(args[i].stack)) < 0 ) continue; | |
753 if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue; | |
754 | |
755 /* 退避 */ | |
756 temp = assign_temp(args[i].tree_value, 1, 0, 0); | |
757 if ( args[i].mode==BLKmode ) | |
758 emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 ); | |
759 else | |
760 emit_move_insn ( temp, args[i].value ); | |
761 args[i].value = temp; | |
762 | |
763 } | |
764 return; | |
765 } | |
766 | |
767 static int | |
768 check_frame_offset(rtx x) | |
769 { | |
770 int i; | |
771 rtx addr; | |
772 if ( !x || !MEM_P(x)) | |
773 return -1; | |
774 | |
775 addr = XEXP(x, 0); | |
776 if (addr == crtl->args.internal_arg_pointer) | |
777 i = 0; | |
778 else if (GET_CODE (addr) == PLUS | |
779 && XEXP (addr, 0) == crtl->args.internal_arg_pointer | |
780 && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
781 i = INTVAL (XEXP (addr, 1)); | |
782 else if (GET_CODE (addr) == PLUS | |
783 && GET_CODE (XEXP (addr, 0)) == CONST_INT | |
784 && XEXP (addr, 1) == crtl->args.internal_arg_pointer ) | |
785 i = INTVAL (XEXP (addr, 0)); | |
786 else | |
787 return -1; | |
788 | |
789 return i; | |
790 } | |
791 |