Mercurial > hg > CbC > CbC_gcc
annotate gcc/cbc-goto.h @ 25:2476ed92181e
modified machine description of i386 for support indirect sibcall attributed fastcall.
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 27 Oct 2009 16:04:06 +0900 |
parents | 959d4c8c8abc |
children | 3aaf117db171 |
rev | line source |
---|---|
3 | 1 |
/* Forward declarations for the CbC (Continuation based C) goto
   expansion helpers defined later in this file.  */
static void
preexpand_argument_expr (struct arg_data *, int);

static void
determine_order(int *, int);

static int
expand_one_arg_push (struct arg_data *, rtx, int, int, int);

static void
push_overlaps(struct arg_data *, int);

static int
check_frame_offset(rtx);
16 | |
17 | |
/* Expand a CbC (Continuation based C) "goto" to a code segment as a
   forced sibling call.  The bulk of this function is copied from
   expand_call() in gcc/calls.c, reduced to the sibcall (pass == 0)
   path: arguments are evaluated, overlap-checked, and stored in
   parallel into the caller's own incoming argument block, then a tail
   call insn is emitted.  Control never returns from the generated
   call.

   EXP is the CALL_EXPR being expanded and ADDR the callee address
   expression.  ARGS / ARGS_SIZE / ARGS_SO_FAR describe the actual
   arguments, already analyzed by the caller.  The remaining
   parameters mirror the local state of expand_call at the point this
   helper is invoked.  Returns TARGET, or const0_rtx when the call's
   value is void or ignored.  */
static rtx
expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype,
		 tree addr,
		 int ignore,
		 int flags,
		 int num_actuals,
		 struct arg_data *args,
		 struct args_size *args_size,
		 CUMULATIVE_ARGS args_so_far,
		 rtx old_stack_level,
		 int reg_parm_stack_space,
		 int old_pending_adj,
		 unsigned HOST_WIDE_INT preferred_stack_boundary,
		 unsigned HOST_WIDE_INT preferred_unit_stack_boundary,
		 rtx structure_value_addr,
		 //int structure_value_addr_parm,
		 int old_inhibit_defer_pop
		 )
{

  /* The following variables are just copied from expand_call.  */

  int pass = 0;			/* We only ever do the sibcall pass here.  */
  int i;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
#endif
  rtx funexp;
  rtx valreg;
  struct args_size adjusted_args_size;
  int unadjusted_args_size;
  int reg_parm_seen;
  rtx static_chain_value;
  int old_stack_allocated;
  int old_stack_pointer_delta = 0;
  int old_stack_arg_under_construction = 0;
  rtx call_fusage;
  char *stack_usage_map_buf = NULL;
  rtx argblock = 0;
  HOST_WIDE_INT struct_value_size = 0;
  int pcc_struct_value = 0;
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  rtx tail_call_insns = NULL_RTX;


  int *store_order;		/* Index permutation for parallel stores.  */


  int sibcall_failure = 0;
  /* We want to emit any pending stack adjustments before the tail
     recursion "call".  That way we know any adjustment after the tail
     recursion call can be ignored if we indeed use the tail
     call expansion.  */
  int save_pending_stack_adjust = 0;
  int save_stack_pointer_delta = 0;
  rtx insns;
  rtx before_call, next_arg_reg;

  /* State variables we need to save and restore between
     iterations.  */
  save_pending_stack_adjust = pending_stack_adjust;
  save_stack_pointer_delta = stack_pointer_delta;
  flags |= ECF_SIBCALL;		/* A CbC goto is always a sibling call.  */

  /* Other state variables that we must reinitialize each time
     through the loop (that are not initialized by the loop itself).  */
  argblock = 0;
  call_fusage = 0;

  /* Start a new sequence for the normal call case.

     From this point on, if the sibling call fails, we want to set
     sibcall_failure instead of continuing the loop.  */
  start_sequence ();

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now if there is any chance
     this might be a call to alloca or if we are expanding a sibling
     call sequence or if we are calling a function that is to return
     with stack pointer depressed.
     Also do the adjustments before a throwing call, otherwise
     exception handling can fail; PR 19225.  */
  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0
	  && (flags & ECF_MAY_BE_ALLOCA))
      || (pending_stack_adjust > 0
	  && flag_exceptions && !(flags & ECF_NOTHROW))
      || pass == 0)
    do_pending_stack_adjust ();


  /* Emit the stack-protector check before the control transfer, since
     the sibling call never returns to run an epilogue.  */
  if (pass == 0 && crtl->stack_protect_guard)
    stack_protect_epilogue ();

  adjusted_args_size = *args_size;
  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  When generating a sibcall
     pattern, do not round up, since we'll be re-using whatever space our
     caller provided.  */
  unadjusted_args_size
    = compute_argument_block_size (reg_parm_stack_space,
				   &adjusted_args_size,
				   fndecl, fntype,
				   (pass == 0 ? 0
				    : preferred_stack_boundary));

  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

  /* Arguments are written over our own incoming argument area.  */
  argblock = crtl->args.internal_arg_pointer;

  argblock
#ifdef STACK_GROWS_DOWNWARD
    = plus_constant (argblock, crtl->args.pretend_args_size);
#else
    = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif


  /* Map of argument-block bytes already stored, used to detect
     overlapping stores.  */
  stored_args_map = sbitmap_alloc (args_size->constant);
  sbitmap_zero (stored_args_map);


  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The save/restore code in store_one_arg handles all
	 cases except one: a constructor call (including a C
	 function returning a BLKmode struct) to initialize
	 an argument.  */
      if (stack_arg_under_construction)
	{
	  rtx push_size
	    = GEN_INT (adjusted_args_size.constant
		       + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
							  : TREE_TYPE (fndecl))) ? 0
			  : reg_parm_stack_space));
	  if (old_stack_level == 0)
	    {
	      emit_stack_save (SAVE_BLOCK, &old_stack_level,
			       NULL_RTX);
	      old_stack_pointer_delta = stack_pointer_delta;
	      old_pending_adj = pending_stack_adjust;
	      pending_stack_adjust = 0;
	      /* stack_arg_under_construction says whether a stack
		 arg is being constructed at the old stack level.
		 Pushing the stack gets a clean outgoing argument
		 block.  */
	      old_stack_arg_under_construction
		= stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	      /* Make a new map for the new argument list.  */
	      if (stack_usage_map_buf)
		free (stack_usage_map_buf);
	      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
	      stack_usage_map = stack_usage_map_buf;
	      memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
	      highest_outgoing_arg_in_use = 0;
	    }
	  allocate_dynamic_stack_space (push_size, NULL_RTX,
					BITS_PER_UNIT);
	}

      /* If argument evaluation might modify the stack pointer,
	 copy the address of the argument list to a register.  */
      for (i = 0; i < num_actuals; i++)
	if (args[i].pass_on_stack)
	  {
	    argblock = copy_addr_to_reg (argblock);
	    break;
	  }
    }

  compute_argument_addresses (args, argblock, num_actuals);

  /* In the case that a function (not itself a code segment) does a
     goto into a code segment, the callee expects the CbC pretended
     stack size; adjust the stack space accordingly.  */
  if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
    //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) )
    {
      HOST_WIDE_INT padding;
      padding = CbC_PRETENDED_STACK_SIZE -
	(crtl->args.size - crtl->args.pretend_args_size);
      /* NOTE(review): the "0&&" guard disables this adjustment, so
	 PADDING is computed but never applied — confirm whether this
	 is intentionally dead or a leftover debugging switch.  */
      if (0&&padding > 0)
	anti_adjust_stack (GEN_INT (padding));
    }

  /* Now that the stack is properly aligned, pops can't safely
     be deferred during the evaluation of the arguments.  */
  NO_DEFER_POP;

  funexp = rtx_for_function_call (fndecl, addr);

  /* Figure out the register where the value, if any, will come back.
     A code segment never returns a value, so none is needed.  */
  valreg = 0;


  /* Precompute all register parameters.  It isn't safe to compute anything
     once we have started filling any specific hard regs.  */
  precompute_register_parameters (num_actuals, args, &reg_parm_seen);

  if (CALL_EXPR_STATIC_CHAIN (exp))
    static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
  else
    static_chain_value = 0;


  /* Parallel assignment: decide an order for storing the arguments so
     that no store clobbers a value still needed by a later one.  */
  store_order = alloca (num_actuals * sizeof (int));
  memset (store_order, 0, num_actuals * sizeof (int));

  /* Fill the arg[i]->exprs (evaluate stack-passed arguments).  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[i].reg == 0 || args[i].pass_on_stack)
	{
	  preexpand_argument_expr (&args[i],
				   adjusted_args_size.var != 0);
	}
    }


  /* Copy arguments whose source overlaps the argument block into
     temporaries first.  */
  push_overlaps(args, num_actuals);

  /* Determine ordering to store arguments, and generate RTL that
     stores some values to temporaries if needed.
     (Currently this just produces the reversed index order.)  */
  determine_order(store_order, num_actuals);

  /* Push arguments in the chosen order.  */
  for (i = 0; i < num_actuals; i++)
    {
      if (args[store_order[i]].reg == 0
	  || args[store_order[i]].pass_on_stack
	  || args[store_order[i]].partial!=0 )
	{
	  expand_one_arg_push (&args[store_order[i]], argblock, flags,
			       adjusted_args_size.var != 0,
			       reg_parm_stack_space);
	}
    }


  /* If register arguments require space on the stack and stack space
     was not preallocated, allocate stack space here for arguments
     passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
  //if (!ACCUMULATE_OUTGOING_ARGS
  //&& must_preallocate == 0 && reg_parm_stack_space > 0)
  //anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

  /* Load the callee address into a suitable rtx (register for an
     indirect sibcall).  */
  funexp = prepare_call_address (funexp, static_chain_value,
				 &call_fusage, reg_parm_seen, pass == 0);

  /* Store args into registers.  */
  load_register_parameters (args, num_actuals, &call_fusage, flags,
			    //pass == 0, &sibcall_failure);
			    0, NULL);

  /* Save a pointer to the last insn before the call, so that we can
     later safely search backwards to find the CALL_INSN.  */
  before_call = get_last_insn ();

  /* Set up next argument register.  For sibling calls on machines
     with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
  if (pass == 0)
    next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
					  void_type_node, 1);
  else
#endif
    next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
				 void_type_node, 1);

  /* All arguments and registers used for the call must be set up by
     now!  */

  /* Stack must be properly aligned now.  */
  gcc_assert (!pass
	      || !(stack_pointer_delta % preferred_unit_stack_boundary));
#if 0
  /* Store environment ("goto with env" support, currently disabled).  */
  if ( env_tree!=NULL )
    {
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));
      emit_move_insn (hard_frame_pointer_rtx, env_rtx);
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      //pop_temp_slots ();

      emit_indirect_jump (funexp);
    }
#endif
  /* Indirect sibcall: FUNEXP lives in a register/temporary, so keep
     its temp slots alive across call emission.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      push_temp_slots();
      preserve_temp_slots(funexp);
      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   next_arg_reg, valreg, 0, call_fusage,
		   flags, & args_so_far);
      pop_temp_slots();
    }
  else
    {

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   next_arg_reg, valreg, 0, call_fusage,
		   flags, & args_so_far);
    }

  /* If a non-BLKmode value is returned at the most significant end
     of a register, shift the register right by the appropriate amount
     and update VALREG accordingly.  BLKmode values are handled by the
     group load/store machinery below.  */
  if (!structure_value_addr
      && !pcc_struct_value
      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
      && targetm.calls.return_in_msb (TREE_TYPE (exp)))
    {
      if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
	sibcall_failure = 1;
      valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
    }


  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if ((flags & ECF_NORETURN) || pass == 0)
    {
      /* The barrier must be emitted
	 immediately after the CALL_INSN.  Some ports emit more
	 than just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);

      /* Stack adjustments after a noreturn call are dead code.
	 However when NO_DEFER_POP is in effect, we must preserve
	 stack_pointer_delta.  */
      if (inhibit_defer_pop == 0)
	{
	  stack_pointer_delta = old_stack_allocated;
	  pending_stack_adjust = 0;
	}
    }

  /* If value type not void, return an rtx for the value.  */

  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
      || ignore)
    target = const0_rtx;

  if (targetm.calls.promote_function_return(funtype))
    {
      /* If we promoted this return value, make the proper SUBREG.
	 TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
	  && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
	  && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	{
	  tree type = TREE_TYPE (exp);
	  int unsignedp = TYPE_UNSIGNED (type);
	  int offset = 0;
	  enum machine_mode pmode;

	  pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
	  /* If we don't promote as expected, something is wrong.  */
	  gcc_assert (GET_MODE (target) == pmode);

	  if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
	      && (GET_MODE_SIZE (GET_MODE (target))
		  > GET_MODE_SIZE (TYPE_MODE (type))))
	    {
	      offset = GET_MODE_SIZE (GET_MODE (target))
		- GET_MODE_SIZE (TYPE_MODE (type));
	      if (! BYTES_BIG_ENDIAN)
		offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
	      else if (! WORDS_BIG_ENDIAN)
		offset %= UNITS_PER_WORD;
	    }
	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	  SUBREG_PROMOTED_VAR_P (target) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
	}
    }

  /* If size of args is variable or this was a constructor call for a stack
     argument, restore saved stack-pointer value.  */

  if (old_stack_level)
    {
      emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
      stack_pointer_delta = old_stack_pointer_delta;
      pending_stack_adjust = old_pending_adj;
      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
      stack_arg_under_construction = old_stack_arg_under_construction;
      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  /* If this was alloca, record the new stack level for nonlocal gotos.
     Check for the handler slots since we might not have a save area
     for non-local gotos.  */

  if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  /* Free up storage we no longer need.  */
  for (i = 0; i < num_actuals; ++i)
    if (args[i].aligned_regs)
      free (args[i].aligned_regs);

  insns = get_insns ();
  end_sequence ();

  tail_call_insns = insns;

  /* Restore the pending stack adjustment now that we have
     finished generating the sibling call sequence.  */

  pending_stack_adjust = save_pending_stack_adjust;
  stack_pointer_delta = save_stack_pointer_delta;

  /* Prepare arg structure for next iteration.  */
  for (i = 0; i < num_actuals; i++)
    {
      args[i].value = 0;
      args[i].aligned_regs = 0;
      args[i].stack = 0;
    }

  sbitmap_free (stored_args_map);


  /* Unlike expand_call, emit the recorded tail-call sequence
     unconditionally — a CbC goto must become a sibcall.  */
  emit_insn(tail_call_insns);
  crtl->tail_call_emit = true;

  return target;
}
478 | |
479 | |
/* Evaluate ARG's expression into ARG->value ahead of the actual
   pushes, recording the generated insns in ARG->exprs and emitting
   them into the enclosing sequence.  Derived from the first half of
   store_one_arg() in calls.c.  Only called for arguments that are
   stack-passed or partially register-passed.  */
static void
preexpand_argument_expr (struct arg_data *arg,
			 int variable_size ATTRIBUTE_UNUSED)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();


  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* Start a new sequence so the evaluation insns can be captured in
     arg->exprs.  */
  start_sequence ();


  if (arg->pass_on_stack)
    stack_arg_under_construction++;

  /* Expand directly into the stack slot when the value is fully
     stack-passed and the modes agree; otherwise into a fresh rtx.  */
  arg->value = expand_expr (pval,
			    (partial
			     || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
			    ? NULL_RTX : arg->stack,
			    VOIDmode, EXPAND_STACK_PARM);

  /* If we are promoting object (or for any other reason) the mode
     doesn't agree, convert the mode.  */

  if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
    arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				arg->value, arg->unsignedp);

  if (arg->pass_on_stack)
    stack_arg_under_construction--;

  arg->exprs = get_insns ();
  end_sequence ();

  /* Emit the captured evaluation insns into the caller's sequence.  */
  if (arg->exprs) emit_insn(arg->exprs);

  preserve_temp_slots (arg->value);
  pop_temp_slots ();

  return ;
}
549 | |
/* Actually store/push ARG into its slot in ARGBLOCK (the caller's
   incoming argument area).  Derived from the second half of
   store_one_arg() in calls.c; the stack-usage save/restore machinery
   of the original has been trimmed.  Returns 0 unconditionally.  */
static int
expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags,
		     int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  rtx reg = 0;
  int partial = 0;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();


  /* Copied from store_one_arg; modify here after.  */
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      //if (flags & ECF_SIBCALL)
      reg = arg->tail_call_reg;
      //else
      //reg = arg->reg;
      partial = arg->partial;
    }
  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;




  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      PARM_BOUNDARY, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		  crtl->args.internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && GET_CODE (size_rtx) == CONST_INT);
	    }
	}

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.
     NOTE(review): lower_bound and upper_bound are never updated in
     this trimmed copy, and flags always carries ECF_SIBCALL on the
     CbC path, so this loop currently marks nothing — leftover from
     store_one_arg; confirm before relying on stack_usage_map here.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return 0;
}
744 | |
/* Decide the order in which the parallel-assignment stores are
   emitted.  For now this is simply the reversed index sequence
   NUM_ACTUALS-1 .. 0, so arguments are stored last-to-first;
   ORDER must have room for NUM_ACTUALS entries.  */
static void
determine_order(int *order, int num_actuals)
{
  int remaining = num_actuals;
  int *slot = order;

  while (remaining > 0)
    *slot++ = --remaining;
}
753 | |
/* For each argument whose current value AND destination slot both
   live inside the caller's incoming argument block, copy the value to
   a fresh temporary first.  This prevents a later store into the
   argument block from clobbering a value that is still needed
   (emit_push_insn cannot handle such overlaps).  */
static void
push_overlaps(struct arg_data *args, int num_actuals)
{
  int i;

  for (i=0; i<num_actuals; i++)
    {
      int dst_offset; /* Offset of the destination slot within the arg block.  */
      int src_offset; /* Offset of the source value within the arg block.  */
      rtx temp;
      if ( (dst_offset=check_frame_offset(args[i].stack)) < 0 ) continue;
      if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue;

      /* Evacuate the value into a temporary (originally "退避").  */
      temp = assign_temp(args[i].tree_value, 1, 0, 0);
      if ( args[i].mode==BLKmode )
	emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 );
      else
	emit_move_insn ( temp, args[i].value );
      args[i].value = temp;

    }
  return;
}
778 | |
779 static int | |
780 check_frame_offset(rtx x) | |
781 { | |
782 int i; | |
783 rtx addr; | |
784 if ( !x || !MEM_P(x)) | |
785 return -1; | |
786 | |
787 addr = XEXP(x, 0); | |
788 if (addr == crtl->args.internal_arg_pointer) | |
789 i = 0; | |
790 else if (GET_CODE (addr) == PLUS | |
791 && XEXP (addr, 0) == crtl->args.internal_arg_pointer | |
792 && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
793 i = INTVAL (XEXP (addr, 1)); | |
794 else if (GET_CODE (addr) == PLUS | |
795 && GET_CODE (XEXP (addr, 0)) == CONST_INT | |
796 && XEXP (addr, 1) == crtl->args.internal_arg_pointer ) | |
797 i = INTVAL (XEXP (addr, 0)); | |
798 else | |
799 return -1; | |
800 | |
801 return i; | |
802 } | |
803 |