Mercurial > hg > CbC > CbC_gcc
annotate gcc/cbc-goto.h @ 21:959d4c8c8abc
add conv.c conv1.c
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 29 Sep 2009 20:15:16 +0900 |
parents | 7ff9aed93de8 |
children | 2476ed92181e |
rev | line source |
---|---|
/* Forward declarations for the helpers used by expand_cbc_goto below.
   These implement the parallel-assignment argument store needed when a
   CbC "goto" re-uses the caller's own incoming argument area.  */

static void
preexpand_argument_expr (struct arg_data *, int);

static void
determine_order(int *, int);

static int
expand_one_arg_push (struct arg_data *, rtx, int, int, int);

static void
push_overlaps(struct arg_data *, int);

static int
check_frame_offset(rtx);
17 | |
/* Expand a CbC (Continuation based C) "goto" to a code segment as a
   forced sibling (tail) call.  This is a trimmed copy of the pass-0
   (sibcall) path of GCC's expand_call: it emits the argument stores,
   loads the parameter registers, and generates the call insn with
   ECF_SIBCALL set so the jump never returns here.

   EXP is the CALL_EXPR being expanded; TARGET receives the (unused)
   return value rtx; FNDECL/FUNTYPE/FNTYPE/ADDR describe the callee;
   ARGS/ARGS_SIZE/ARGS_SO_FAR are the precomputed argument data from
   the caller (expand_call); the remaining parameters carry saved
   stack-bookkeeping state to restore afterwards.

   Returns TARGET (const0_rtx when the value is ignored or void).  */
static rtx
expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype,
                 tree addr,
                 int ignore,
                 int flags,
                 int num_actuals,
                 struct arg_data *args,
                 struct args_size *args_size,
                 CUMULATIVE_ARGS args_so_far,
                 rtx old_stack_level,
                 int reg_parm_stack_space,
                 int old_pending_adj,
                 unsigned HOST_WIDE_INT preferred_stack_boundary,
                 unsigned HOST_WIDE_INT preferred_unit_stack_boundary,
                 rtx structure_value_addr,
                 //int structure_value_addr_parm,
                 int old_inhibit_defer_pop
                 )
{

  /* The following variables are copied from expand_call.  */

  int pass = 0;   /* always the sibcall pass; kept so copied code reads the same.  */
  int i;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
#endif
  rtx funexp;
  rtx valreg;
  struct args_size adjusted_args_size;
  int unadjusted_args_size;
  int reg_parm_seen;
  rtx static_chain_value;
  int old_stack_allocated;
  int old_stack_pointer_delta = 0;
  int old_stack_arg_under_construction = 0;
  rtx call_fusage;
  char *stack_usage_map_buf = NULL;
  rtx argblock = 0;
  HOST_WIDE_INT struct_value_size = 0;
  int pcc_struct_value = 0;
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  rtx tail_call_insns = NULL_RTX;

  /* Order in which the arguments will be stored (see determine_order).  */
  int *store_order;

  int sibcall_failure = 0;
  /* We want to emit any pending stack adjustments before the tail
     recursion "call".  That way we know any adjustment after the tail
     recursion call can be ignored if we indeed use the tail
     call expansion.  */
  int save_pending_stack_adjust = 0;
  int save_stack_pointer_delta = 0;
  rtx insns;
  rtx before_call, next_arg_reg;

  /* State variables we need to save and restore between
     iterations.  */
  save_pending_stack_adjust = pending_stack_adjust;
  save_stack_pointer_delta = stack_pointer_delta;
  flags |= ECF_SIBCALL;

  /* Other state variables that we must reinitialize each time
     through the loop (that are not initialized by the loop itself).  */
  argblock = 0;
  call_fusage = 0;

  /* Start a new sequence for the normal call case.

     From this point on, if the sibling call fails, we want to set
     sibcall_failure instead of continuing the loop.  */
  start_sequence ();

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now if there is any chance
     this might be a call to alloca or if we are expanding a sibling
     call sequence or if we are calling a function that is to return
     with stack pointer depressed.
     Also do the adjustments before a throwing call, otherwise
     exception handling can fail; PR 19225.  */
  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0
          && (flags & ECF_MAY_BE_ALLOCA))
      || (pending_stack_adjust > 0
          && flag_exceptions && !(flags & ECF_NOTHROW))
      || pass == 0)
    do_pending_stack_adjust ();

  if (pass == 0 && crtl->stack_protect_guard)
    stack_protect_epilogue ();

  adjusted_args_size = *args_size;
  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  When generating a sibcall
     pattern, do not round up, since we'll be re-using whatever space our
     caller provided.  */
  unadjusted_args_size
    = compute_argument_block_size (reg_parm_stack_space,
                                   &adjusted_args_size,
                                   fndecl, fntype,
                                   (pass == 0 ? 0
                                    : preferred_stack_boundary));

  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

  /* The CbC goto stores its arguments into the caller's OWN incoming
     argument area (past any pretend args), then jumps.  */
  argblock = crtl->args.internal_arg_pointer;

  argblock
#ifdef STACK_GROWS_DOWNWARD
    = plus_constant (argblock, crtl->args.pretend_args_size);
#else
    = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif

  /* Bitmap of stack slots already written by earlier argument stores.  */
  stored_args_map = sbitmap_alloc (args_size->constant);
  sbitmap_zero (stored_args_map);

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The save/restore code in store_one_arg handles all
         cases except one: a constructor call (including a C
         function returning a BLKmode struct) to initialize
         an argument.  */
      if (stack_arg_under_construction)
        {
          rtx push_size
            = GEN_INT (adjusted_args_size.constant
                       + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
                                                          : TREE_TYPE (fndecl))) ? 0
                          : reg_parm_stack_space));
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level,
                               NULL_RTX);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack
                 arg is being constructed at the old stack level.
                 Pushing the stack gets a clean outgoing argument
                 block.  */
              old_stack_arg_under_construction
                = stack_arg_under_construction;
              stack_arg_under_construction = 0;
              /* Make a new map for the new argument list.  */
              if (stack_usage_map_buf)
                free (stack_usage_map_buf);
              stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
              stack_usage_map = stack_usage_map_buf;
              memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
              highest_outgoing_arg_in_use = 0;
            }
          allocate_dynamic_stack_space (push_size, NULL_RTX,
                                        BITS_PER_UNIT);
        }

      /* If argument evaluation might modify the stack pointer,
         copy the address of the argument list to a register.  */
      for (i = 0; i < num_actuals; i++)
        if (args[i].pass_on_stack)
          {
            argblock = copy_addr_to_reg (argblock);
            break;
          }
    }

  compute_argument_addresses (args, argblock, num_actuals);

  /* When an ordinary C function (not itself a code segment) performs
     a goto to a code segment, the code segment may need more argument
     space than this frame provides; the padding computed below would
     grow the stack to cover it.  */
  if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
    //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) )
    {
      HOST_WIDE_INT padding;
      padding = CbC_PRETENDED_STACK_SIZE -
        (crtl->args.size - crtl->args.pretend_args_size);
      /* NOTE(review): the `0 &&` makes this adjustment unreachable —
         apparently disabled on purpose; confirm before re-enabling.  */
      if (0&&padding > 0)
        anti_adjust_stack (GEN_INT (padding));
    }

  /* Now that the stack is properly aligned, pops can't safely
     be deferred during the evaluation of the arguments.  */
  NO_DEFER_POP;

  funexp = rtx_for_function_call (fndecl, addr);

  /* Figure out the register where the value, if any, will come back.
     A CbC goto never returns, so no value register is needed.  */
  valreg = 0;

  /* Precompute all register parameters.  It isn't safe to compute anything
     once we have started filling any specific hard regs.  */
  precompute_register_parameters (num_actuals, args, &reg_parm_seen);

  /* Operand 2 of the CALL_EXPR is the static chain — used here to pass
     the environment for "goto with environment".  */
  if (TREE_OPERAND (exp, 2))
    static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
  else
    static_chain_value = 0;

  /* Parallel assignment: because source and destination slots may
     overlap (same argument area), stores must be ordered and some
     values saved to temporaries first.  */
  store_order = alloca (num_actuals * sizeof (int));
  memset (store_order, 0, num_actuals * sizeof (int));

  /* Evaluate each stack-passed argument's expression (fills arg->exprs
     and arg->value) without storing it yet.  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[i].reg == 0 || args[i].pass_on_stack)
        {
          preexpand_argument_expr (&args[i],
                                   adjusted_args_size.var != 0);
        }
    }

  /* Copy to temporaries any argument whose source would be clobbered
     by another argument's store.  */
  push_overlaps(args, num_actuals);

  /* Determine the order in which to store arguments, generating RTL
     to save values in temporaries where needed.  Currently this just
     produces reverse order (see determine_order).  */
  determine_order(store_order, num_actuals);

  /* Store the stack-passed (or partially register-passed) arguments
     in the chosen order.  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[store_order[i]].reg == 0
          || args[store_order[i]].pass_on_stack
          || args[store_order[i]].partial!=0 )
        {
          expand_one_arg_push (&args[store_order[i]], argblock, flags,
                               adjusted_args_size.var != 0,
                               reg_parm_stack_space);
        }
    }

  /* If register arguments require space on the stack and stack space
     was not preallocated, allocate stack space here for arguments
     passed in registers.  (Disabled — kept from expand_call.)  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
  //if (!ACCUMULATE_OUTGOING_ARGS
  //&& must_preallocate == 0 && reg_parm_stack_space > 0)
  //anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

  /* Load the address to call into a register if required, attaching
     the static chain (environment).  */
  funexp = prepare_call_address (funexp, static_chain_value,
                                 &call_fusage, reg_parm_seen, pass == 0);

  /* Store args into registers.  */
  load_register_parameters (args, num_actuals, &call_fusage, flags,
                            //pass == 0, &sibcall_failure);
                            0, NULL);

  /* Save a pointer to the last insn before the call, so that we can
     later safely search backwards to find the CALL_INSN.  */
  before_call = get_last_insn ();

  /* Set up next argument register.  For sibling calls on machines
     with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
  if (pass == 0)
    next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
                                          void_type_node, 1);
  else
#endif
    next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
                                 void_type_node, 1);

  /* All arguments and registers used for the call must be set up by
     now!  */

  /* Stack must be properly aligned now.  */
  gcc_assert (!pass
              || !(stack_pointer_delta % preferred_unit_stack_boundary));

#if 0
  /* Store environment: restore the saved frame pointer and jump
     indirectly.  Superseded by the static-chain mechanism above;
     kept for reference.  */
  if ( env_tree!=NULL )
    {
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));
      emit_move_insn (hard_frame_pointer_rtx, env_rtx);
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      //pop_temp_slots ();

      emit_indirect_jump (funexp);
    }
#endif

  /* Generate the actual call instruction.  */
  emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
               adjusted_args_size.constant, struct_value_size,
               //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
               next_arg_reg, valreg, 0, call_fusage,
               flags, & args_so_far);

  /* If a non-BLKmode value is returned at the most significant end
     of a register, shift the register right by the appropriate amount
     and update VALREG accordingly.  BLKmode values are handled by the
     group load/store machinery below.  */
  if (!structure_value_addr
      && !pcc_struct_value
      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
      && targetm.calls.return_in_msb (TREE_TYPE (exp)))
    {
      if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
        sibcall_failure = 1;
      valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
    }

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if ((flags & ECF_NORETURN) || pass == 0)
    {
      /* The barrier must be emitted
         immediately after the CALL_INSN.  Some ports emit more
         than just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);

      /* Stack adjustments after a noreturn call are dead code.
         However when NO_DEFER_POP is in effect, we must preserve
         stack_pointer_delta.  */
      if (inhibit_defer_pop == 0)
        {
          stack_pointer_delta = old_stack_allocated;
          pending_stack_adjust = 0;
        }
    }

  /* If value type not void, return an rtx for the value.  */

  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
      || ignore)
    target = const0_rtx;

  if (targetm.calls.promote_function_return(funtype))
    {
      /* If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
          && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
          && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
        {
          tree type = TREE_TYPE (exp);
          int unsignedp = TYPE_UNSIGNED (type);
          int offset = 0;
          enum machine_mode pmode;

          pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
          /* If we don't promote as expected, something is wrong.  */
          gcc_assert (GET_MODE (target) == pmode);

          if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
              && (GET_MODE_SIZE (GET_MODE (target))
                  > GET_MODE_SIZE (TYPE_MODE (type))))
            {
              offset = GET_MODE_SIZE (GET_MODE (target))
                - GET_MODE_SIZE (TYPE_MODE (type));
              if (! BYTES_BIG_ENDIAN)
                offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
              else if (! WORDS_BIG_ENDIAN)
                offset %= UNITS_PER_WORD;
            }
          target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
          SUBREG_PROMOTED_VAR_P (target) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
        }
    }

  /* If size of args is variable or this was a constructor call for a stack
     argument, restore saved stack-pointer value.  */

  if (old_stack_level)
    {
      emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
      stack_pointer_delta = old_stack_pointer_delta;
      pending_stack_adjust = old_pending_adj;
      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
      stack_arg_under_construction = old_stack_arg_under_construction;
      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  /* If this was alloca, record the new stack level for nonlocal gotos.
     Check for the handler slots since we might not have a save area
     for non-local gotos.  */

  if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  /* Free up storage we no longer need.  */
  for (i = 0; i < num_actuals; ++i)
    if (args[i].aligned_regs)
      free (args[i].aligned_regs);

  insns = get_insns ();
  end_sequence ();

  tail_call_insns = insns;

  /* Restore the pending stack adjustment now that we have
     finished generating the sibling call sequence.  */

  pending_stack_adjust = save_pending_stack_adjust;
  stack_pointer_delta = save_stack_pointer_delta;

  /* Prepare arg structure for next iteration.  */
  for (i = 0; i < num_actuals; i++)
    {
      args[i].value = 0;
      args[i].aligned_regs = 0;
      args[i].stack = 0;
    }

  sbitmap_free (stored_args_map);

  /* Unlike expand_call, emit the generated sequence unconditionally —
     the CbC goto has no non-sibcall fallback.  */
  emit_insn(tail_call_insns);
  crtl->tail_call_emit = true;

  return target;
}
464 | |
465 | |
/* Evaluate ARG's tree expression into RTL (filling arg->exprs and
   arg->value) WITHOUT storing it into its final stack slot yet.
   Derived from store_one_arg; the store itself happens later in
   expand_one_arg_push, after overlap analysis.

   VARIABLE_SIZE is accepted for interface symmetry with
   expand_one_arg_push but is unused here.  */
static void
preexpand_argument_expr (struct arg_data *arg,
                         int variable_size ATTRIBUTE_UNUSED)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  Always use the sibcall
     register since a CbC goto is always a tail call.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* Start a new sequence so the expansion insns can be captured into
     arg->exprs.  */
  start_sequence ();

  if (arg->pass_on_stack)
    stack_arg_under_construction++;

  /* Expand directly into the stack slot only when the value needs no
     mode conversion and is not partially in registers.  */
  arg->value = expand_expr (pval,
                            (partial
                             || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                            ? NULL_RTX : arg->stack,
                            VOIDmode, EXPAND_STACK_PARM);

  /* If we are promoting object (or for any other reason) the mode
     doesn't agree, convert the mode.  */

  if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
    arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                arg->value, arg->unsignedp);

  if (arg->pass_on_stack)
    stack_arg_under_construction--;

  arg->exprs = get_insns ();
  end_sequence ();

  /* Emit the captured expansion immediately into the main sequence.  */
  if (arg->exprs) emit_insn(arg->exprs);

  preserve_temp_slots (arg->value);
  pop_temp_slots ();

  return ;
}
535 | |
/* Store one already-evaluated argument (ARG, prepared by
   preexpand_argument_expr) into its outgoing stack slot in the block
   at ARGBLOCK.  Copied from store_one_arg and trimmed for the CbC
   tail-call path; FLAGS are the ECF_* call flags,
   REG_PARM_STACK_SPACE as in expand_call.  VARIABLE_SIZE is only
   referenced in the (dead, see below) stack_usage_map marking.
   Always returns 0.  */
static int
expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags,
                     int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  rtx reg = 0;
  int partial = 0;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  /* Copied from store_one_arg.  Modify here afterwards.  */
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  Always the tail-call reg,
     since a CbC goto is always a sibcall.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }
  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  NOTE(review):
             unlike store_one_arg, this only asserts — the actual
             overlap handling was moved to push_overlaps; confirm the
             assertion alone is sufficient here.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                  crtl->args.internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && GET_CODE (size_rtx) == CONST_INT);
            }
        }

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  NOTE(review): lower_bound and
     upper_bound are never updated from their initial 0, so this loop
     currently marks nothing — likely vestigial from store_one_arg.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return 0;
}
730 | |
/* Fill ORDER with the sequence in which the NUM_ACTUALS arguments
   should be stored.  Currently this is simply reverse declaration
   order: ORDER[k] = NUM_ACTUALS - 1 - k.  A smarter ordering (based
   on actual slot overlaps) could be substituted here later.  */
static void
determine_order(int *order, int num_actuals)
{
  int next = num_actuals;
  int k = 0;

  while (next > 0)
    {
      next--;
      order[k] = next;
      k++;
    }
}
739 | |
/* For each argument whose source value AND destination slot both live
   in the incoming argument area (which a CbC goto re-uses as its
   outgoing area), copy the value to a fresh temporary first, so a
   later argument store cannot clobber it before it is read.  */
static void
push_overlaps(struct arg_data *args, int num_actuals)
{
  int i;

  for (i=0; i<num_actuals; i++)
    {
      int dst_offset; /* offset of the destination slot in the arg area.  */
      int src_offset; /* offset of the source value in the arg area.  */
      rtx temp;
      /* Skip arguments whose slot or value is outside the argument
         area — they cannot overlap.  */
      if ( (dst_offset=check_frame_offset(args[i].stack)) < 0 ) continue;
      if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue;

      /* Save the value into a temporary before any slot is written.  */
      temp = assign_temp(args[i].tree_value, 1, 0, 0);
      if ( args[i].mode==BLKmode )
        emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 );
      else
        emit_move_insn ( temp, args[i].value );
      args[i].value = temp;

    }
  return;
}
764 | |
765 static int | |
766 check_frame_offset(rtx x) | |
767 { | |
768 int i; | |
769 rtx addr; | |
770 if ( !x || !MEM_P(x)) | |
771 return -1; | |
772 | |
773 addr = XEXP(x, 0); | |
774 if (addr == crtl->args.internal_arg_pointer) | |
775 i = 0; | |
776 else if (GET_CODE (addr) == PLUS | |
777 && XEXP (addr, 0) == crtl->args.internal_arg_pointer | |
778 && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
779 i = INTVAL (XEXP (addr, 1)); | |
780 else if (GET_CODE (addr) == PLUS | |
781 && GET_CODE (XEXP (addr, 0)) == CONST_INT | |
782 && XEXP (addr, 1) == crtl->args.internal_arg_pointer ) | |
783 i = INTVAL (XEXP (addr, 0)); | |
784 else | |
785 return -1; | |
786 | |
787 return i; | |
788 } | |
789 |