comparison gcc/cbc-goto.h @ 58:3aaf117db171

error at dwarf2out.c
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 15 Feb 2010 14:58:24 +0900
parents 2476ed92181e
children
comparison
equal deleted inserted replaced
57:326d9e06c2e3 58:3aaf117db171
15 check_frame_offset(rtx); 15 check_frame_offset(rtx);
16 16
17 17
18 static rtx 18 static rtx
19 expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype, 19 expand_cbc_goto (tree exp, rtx target, tree fndecl, tree funtype, tree fntype,
20 tree addr, 20 tree addr,
21 int ignore, 21 int ignore,
22 int flags, 22 int flags,
23 int num_actuals, 23 int num_actuals,
24 struct arg_data *args, 24 struct arg_data *args,
25 struct args_size *args_size, 25 struct args_size *args_size,
26 CUMULATIVE_ARGS args_so_far, 26 CUMULATIVE_ARGS args_so_far,
27 rtx old_stack_level, 27 rtx old_stack_level,
28 int reg_parm_stack_space, 28 int reg_parm_stack_space,
29 int old_pending_adj, 29 int old_pending_adj,
30 unsigned HOST_WIDE_INT preferred_stack_boundary, 30 unsigned HOST_WIDE_INT preferred_stack_boundary,
31 unsigned HOST_WIDE_INT preferred_unit_stack_boundary, 31 unsigned HOST_WIDE_INT preferred_unit_stack_boundary,
32 rtx structure_value_addr, 32 rtx structure_value_addr,
33 //int structure_value_addr_parm, 33 //int structure_value_addr_parm,
34 int old_inhibit_defer_pop 34 int old_inhibit_defer_pop
35 ) 35 )
36 { 36 {
37 37
38 /* The following variables are just copied from expand_call. */ 38 /* The following variables are just copied from expand_call. */
39 39
40 int pass = 0; 40 int pass = 0;
99 with stack pointer depressed. 99 with stack pointer depressed.
100 Also do the adjustments before a throwing call, otherwise 100 Also do the adjustments before a throwing call, otherwise
101 exception handling can fail; PR 19225. */ 101 exception handling can fail; PR 19225. */
102 if (pending_stack_adjust >= 32 102 if (pending_stack_adjust >= 32
103 || (pending_stack_adjust > 0 103 || (pending_stack_adjust > 0
104 && (flags & ECF_MAY_BE_ALLOCA)) 104 && (flags & ECF_MAY_BE_ALLOCA))
105 || (pending_stack_adjust > 0 105 || (pending_stack_adjust > 0
106 && flag_exceptions && !(flags & ECF_NOTHROW)) 106 && flag_exceptions && !(flags & ECF_NOTHROW))
107 || pass == 0) 107 || pass == 0)
108 do_pending_stack_adjust (); 108 do_pending_stack_adjust ();
109 109
110 110
111 if (pass == 0 && crtl->stack_protect_guard) 111 if (pass == 0 && crtl->stack_protect_guard)
117 and there may be a minimum required size. When generating a sibcall 117 and there may be a minimum required size. When generating a sibcall
118 pattern, do not round up, since we'll be re-using whatever space our 118 pattern, do not round up, since we'll be re-using whatever space our
119 caller provided. */ 119 caller provided. */
120 unadjusted_args_size 120 unadjusted_args_size
121 = compute_argument_block_size (reg_parm_stack_space, 121 = compute_argument_block_size (reg_parm_stack_space,
122 &adjusted_args_size, 122 &adjusted_args_size,
123 fndecl, fntype, 123 fndecl, fntype,
124 (pass == 0 ? 0 124 (pass == 0 ? 0
125 : preferred_stack_boundary)); 125 : preferred_stack_boundary));
126 126
127 old_stack_allocated = stack_pointer_delta - pending_stack_adjust; 127 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
128 128
129 argblock = crtl->args.internal_arg_pointer; 129 argblock = crtl->args.internal_arg_pointer;
130 130
131 argblock 131 argblock
132 #ifdef STACK_GROWS_DOWNWARD 132 #ifdef STACK_GROWS_DOWNWARD
133 = plus_constant (argblock, crtl->args.pretend_args_size); 133 = plus_constant (argblock, crtl->args.pretend_args_size);
134 #else 134 #else
135 = plus_constant (argblock, -crtl->args.pretend_args_size); 135 = plus_constant (argblock, -crtl->args.pretend_args_size);
136 #endif 136 #endif
137 137
138 138
139 stored_args_map = sbitmap_alloc (args_size->constant); 139 stored_args_map = sbitmap_alloc (args_size->constant);
140 sbitmap_zero (stored_args_map); 140 sbitmap_zero (stored_args_map);
141 141
142 142
143 if (ACCUMULATE_OUTGOING_ARGS) 143 if (ACCUMULATE_OUTGOING_ARGS)
144 { 144 {
145 /* The save/restore code in store_one_arg handles all 145 /* The save/restore code in store_one_arg handles all
146 cases except one: a constructor call (including a C 146 cases except one: a constructor call (including a C
147 function returning a BLKmode struct) to initialize 147 function returning a BLKmode struct) to initialize
148 an argument. */ 148 an argument. */
149 if (stack_arg_under_construction) 149 if (stack_arg_under_construction)
150 { 150 {
151 rtx push_size 151 rtx push_size
152 = GEN_INT (adjusted_args_size.constant 152 = GEN_INT (adjusted_args_size.constant
153 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype 153 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
154 : TREE_TYPE (fndecl))) ? 0 154 : TREE_TYPE (fndecl))) ? 0
155 : reg_parm_stack_space)); 155 : reg_parm_stack_space));
156 if (old_stack_level == 0) 156 if (old_stack_level == 0)
157 { 157 {
158 emit_stack_save (SAVE_BLOCK, &old_stack_level, 158 emit_stack_save (SAVE_BLOCK, &old_stack_level,
159 NULL_RTX); 159 NULL_RTX);
160 old_stack_pointer_delta = stack_pointer_delta; 160 old_stack_pointer_delta = stack_pointer_delta;
161 old_pending_adj = pending_stack_adjust; 161 old_pending_adj = pending_stack_adjust;
162 pending_stack_adjust = 0; 162 pending_stack_adjust = 0;
163 /* stack_arg_under_construction says whether a stack 163 /* stack_arg_under_construction says whether a stack
164 arg is being constructed at the old stack level. 164 arg is being constructed at the old stack level.
165 Pushing the stack gets a clean outgoing argument 165 Pushing the stack gets a clean outgoing argument
166 block. */ 166 block. */
167 old_stack_arg_under_construction 167 old_stack_arg_under_construction
168 = stack_arg_under_construction; 168 = stack_arg_under_construction;
169 stack_arg_under_construction = 0; 169 stack_arg_under_construction = 0;
170 /* Make a new map for the new argument list. */ 170 /* Make a new map for the new argument list. */
171 if (stack_usage_map_buf) 171 if (stack_usage_map_buf)
172 free (stack_usage_map_buf); 172 free (stack_usage_map_buf);
173 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); 173 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
174 stack_usage_map = stack_usage_map_buf; 174 stack_usage_map = stack_usage_map_buf;
175 memset (stack_usage_map, 0, highest_outgoing_arg_in_use); 175 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
176 highest_outgoing_arg_in_use = 0; 176 highest_outgoing_arg_in_use = 0;
177 } 177 }
178 allocate_dynamic_stack_space (push_size, NULL_RTX, 178 allocate_dynamic_stack_space (push_size, NULL_RTX,
179 BITS_PER_UNIT); 179 BITS_PER_UNIT);
180 } 180 }
181 181
182 /* If argument evaluation might modify the stack pointer, 182 /* If argument evaluation might modify the stack pointer,
183 copy the address of the argument list to a register. */ 183 copy the address of the argument list to a register. */
184 for (i = 0; i < num_actuals; i++) 184 for (i = 0; i < num_actuals; i++)
185 if (args[i].pass_on_stack) 185 if (args[i].pass_on_stack)
186 { 186 {
187 argblock = copy_addr_to_reg (argblock); 187 argblock = copy_addr_to_reg (argblock);
188 break; 188 break;
189 } 189 }
190 } 190 }
191 191
192 compute_argument_addresses (args, argblock, num_actuals); 192 compute_argument_addresses (args, argblock, num_actuals);
193 193
194 /* In the case that 194 /* In the case that
195 a function jumps to a code segment, 195 a function jumps to a code segment,
196 adjust the stack space. */ 196 adjust the stack space. */
197 if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) ) 197 if ( !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)) )
198 //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) ) 198 //if ( !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)) )
199 { 199 {
200 HOST_WIDE_INT padding; 200 HOST_WIDE_INT padding;
201 padding = CbC_PRETENDED_STACK_SIZE - 201 padding = CbC_PRETENDED_STACK_SIZE -
202 (crtl->args.size - crtl->args.pretend_args_size); 202 (crtl->args.size - crtl->args.pretend_args_size);
203 if (0&&padding > 0) 203 if (0&&padding > 0)
204 anti_adjust_stack (GEN_INT (padding)); 204 anti_adjust_stack (GEN_INT (padding));
205 } 205 }
206 206
207 /* Now that the stack is properly aligned, pops can't safely 207 /* Now that the stack is properly aligned, pops can't safely
208 be deferred during the evaluation of the arguments. */ 208 be deferred during the evaluation of the arguments. */
209 NO_DEFER_POP; 209 NO_DEFER_POP;
217 /* Precompute all register parameters. It isn't safe to compute anything 217 /* Precompute all register parameters. It isn't safe to compute anything
218 once we have started filling any specific hard regs. */ 218 once we have started filling any specific hard regs. */
219 precompute_register_parameters (num_actuals, args, &reg_parm_seen); 219 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
220 220
221 if (CALL_EXPR_STATIC_CHAIN (exp)) 221 if (CALL_EXPR_STATIC_CHAIN (exp))
222 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); 222 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
223 else 223 else
224 static_chain_value = 0; 224 static_chain_value = 0;
225 225
226 226
227 /* parallel assignment */ 227 /* parallel assignment */
228 store_order = alloca (num_actuals * sizeof (int)); 228 store_order = alloca (num_actuals * sizeof (int));
229 memset (store_order, 0, num_actuals * sizeof (int)); 229 memset (store_order, 0, num_actuals * sizeof (int));
230 230
231 /* fill the arg[i]->exprs. */ 231 /* fill the arg[i]->exprs. */
232 for (i = 0; i < num_actuals; i++) 232 for (i = 0; i < num_actuals; i++)
233 { 233 {
234 if (args[i].reg == 0 || args[i].pass_on_stack) 234 if (args[i].reg == 0 || args[i].pass_on_stack)
235 { 235 {
236 preexpand_argument_expr (&args[i], 236 preexpand_argument_expr (&args[i],
237 adjusted_args_size.var != 0); 237 adjusted_args_size.var != 0);
238 } 238 }
239 } 239 }
240 240
241 241
242 /* push overlapped argument to stack. */ 242 /* push overlapped argument to stack. */
243 push_overlaps(args, num_actuals); 243 push_overlaps(args, num_actuals);
249 249
250 /* Push arguments in the order given by store_order. */ 250 /* Push arguments in the order given by store_order. */
251 for (i = 0; i < num_actuals; i++) 251 for (i = 0; i < num_actuals; i++)
252 { 252 {
253 if (args[store_order[i]].reg == 0 253 if (args[store_order[i]].reg == 0
254 || args[store_order[i]].pass_on_stack 254 || args[store_order[i]].pass_on_stack
255 || args[store_order[i]].partial!=0 ) 255 || args[store_order[i]].partial!=0 )
256 { 256 {
257 expand_one_arg_push (&args[store_order[i]], argblock, flags, 257 expand_one_arg_push (&args[store_order[i]], argblock, flags,
258 adjusted_args_size.var != 0, 258 adjusted_args_size.var != 0,
259 reg_parm_stack_space); 259 reg_parm_stack_space);
260 } 260 }
261 } 261 }
262 262
263 263
264 /* If register arguments require space on the stack and stack space 264 /* If register arguments require space on the stack and stack space
265 was not preallocated, allocate stack space here for arguments 265 was not preallocated, allocate stack space here for arguments
269 //&& must_preallocate == 0 && reg_parm_stack_space > 0) 269 //&& must_preallocate == 0 && reg_parm_stack_space > 0)
270 //anti_adjust_stack (GEN_INT (reg_parm_stack_space)); 270 //anti_adjust_stack (GEN_INT (reg_parm_stack_space));
271 #endif 271 #endif
272 272
273 /* */ 273 /* */
274 funexp = prepare_call_address (funexp, static_chain_value, 274 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
275 &call_fusage, reg_parm_seen, pass == 0); 275 &call_fusage, reg_parm_seen, pass == 0);
276 276
277 /* store args into register. */ 277 /* store args into register. */
278 load_register_parameters (args, num_actuals, &call_fusage, flags, 278 load_register_parameters (args, num_actuals, &call_fusage, flags,
279 //pass == 0, &sibcall_failure); 279 //pass == 0, &sibcall_failure);
280 0, NULL); 280 0, NULL);
281 281
282 /* Save a pointer to the last insn before the call, so that we can 282 /* Save a pointer to the last insn before the call, so that we can
283 later safely search backwards to find the CALL_INSN. */ 283 later safely search backwards to find the CALL_INSN. */
284 before_call = get_last_insn (); 284 before_call = get_last_insn ();
285 285
286 /* Set up next argument register. For sibling calls on machines 286 /* Set up next argument register. For sibling calls on machines
287 with register windows this should be the incoming register. */ 287 with register windows this should be the incoming register. */
288 #ifdef FUNCTION_INCOMING_ARG 288 #ifdef FUNCTION_INCOMING_ARG
289 if (pass == 0) 289 if (pass == 0)
290 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode, 290 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
291 void_type_node, 1); 291 void_type_node, 1);
292 else 292 else
293 #endif 293 #endif
294 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, 294 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
295 void_type_node, 1); 295 void_type_node, 1);
296 296
297 /* All arguments and registers used for the call must be set up by 297 /* All arguments and registers used for the call must be set up by
298 now! */ 298 now! */
299 299
300 /* Stack must be properly aligned now. */ 300 /* Stack must be properly aligned now. */
301 gcc_assert (!pass 301 gcc_assert (!pass
302 || !(stack_pointer_delta % preferred_unit_stack_boundary)); 302 || !(stack_pointer_delta % preferred_unit_stack_boundary));
303 #if 0 303 #if 0
304 /* store environment. */ 304 /* store environment. */
305 if ( env_tree!=NULL ) 305 if ( env_tree!=NULL )
306 { 306 {
307 emit_insn (gen_rtx_CLOBBER (VOIDmode, 307 emit_insn (gen_rtx_CLOBBER (VOIDmode,
308 gen_rtx_MEM (BLKmode, 308 gen_rtx_MEM (BLKmode,
309 hard_frame_pointer_rtx))); 309 hard_frame_pointer_rtx)));
310 emit_move_insn (hard_frame_pointer_rtx, env_rtx); 310 emit_move_insn (hard_frame_pointer_rtx, env_rtx);
311 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); 311 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
312 //pop_temp_slots (); 312 //pop_temp_slots ();
313 313
314 emit_indirect_jump (funexp); 314 emit_indirect_jump (funexp);
318 { 318 {
319 push_temp_slots(); 319 push_temp_slots();
320 preserve_temp_slots(funexp); 320 preserve_temp_slots(funexp);
321 /* Generate the actual call instruction. */ 321 /* Generate the actual call instruction. */
322 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, 322 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
323 adjusted_args_size.constant, struct_value_size, 323 adjusted_args_size.constant, struct_value_size,
324 //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, 324 //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
325 next_arg_reg, valreg, 0, call_fusage, 325 next_arg_reg, valreg, 0, call_fusage,
326 flags, & args_so_far); 326 flags, & args_so_far);
327 pop_temp_slots(); 327 pop_temp_slots();
328 } 328 }
329 else 329 else
330 { 330 {
331 331
332 /* Generate the actual call instruction. */ 332 /* Generate the actual call instruction. */
333 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, 333 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
334 adjusted_args_size.constant, struct_value_size, 334 adjusted_args_size.constant, struct_value_size,
335 //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, 335 //next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
336 next_arg_reg, valreg, 0, call_fusage, 336 next_arg_reg, valreg, 0, call_fusage,
337 flags, & args_so_far); 337 flags, & args_so_far);
338 } 338 }
339 339
340 /* If a non-BLKmode value is returned at the most significant end 340 /* If a non-BLKmode value is returned at the most significant end
341 of a register, shift the register right by the appropriate amount 341 of a register, shift the register right by the appropriate amount
342 and update VALREG accordingly. BLKmode values are handled by the 342 and update VALREG accordingly. BLKmode values are handled by the
345 && !pcc_struct_value 345 && !pcc_struct_value
346 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 346 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
347 && targetm.calls.return_in_msb (TREE_TYPE (exp))) 347 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
348 { 348 {
349 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg)) 349 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
350 sibcall_failure = 1; 350 sibcall_failure = 1;
351 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg)); 351 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
352 } 352 }
353 353
354 354
355 /* For calls to `setjmp', etc., inform flow.c it should complain 355 /* For calls to `setjmp', etc., inform flow.c it should complain
357 inform flow that control does not fall through. */ 357 inform flow that control does not fall through. */
358 358
359 if ((flags & ECF_NORETURN) || pass == 0) 359 if ((flags & ECF_NORETURN) || pass == 0)
360 { 360 {
361 /* The barrier must be emitted 361 /* The barrier must be emitted
362 immediately after the CALL_INSN. Some ports emit more 362 immediately after the CALL_INSN. Some ports emit more
363 than just a CALL_INSN above, so we must search for it here. */ 363 than just a CALL_INSN above, so we must search for it here. */
364 364
365 rtx last = get_last_insn (); 365 rtx last = get_last_insn ();
366 while (!CALL_P (last)) 366 while (!CALL_P (last))
367 { 367 {
368 last = PREV_INSN (last); 368 last = PREV_INSN (last);
369 /* There was no CALL_INSN? */ 369 /* There was no CALL_INSN? */
370 gcc_assert (last != before_call); 370 gcc_assert (last != before_call);
371 } 371 }
372 372
373 emit_barrier_after (last); 373 emit_barrier_after (last);
374 374
375 /* Stack adjustments after a noreturn call are dead code. 375 /* Stack adjustments after a noreturn call are dead code.
376 However when NO_DEFER_POP is in effect, we must preserve 376 However when NO_DEFER_POP is in effect, we must preserve
377 stack_pointer_delta. */ 377 stack_pointer_delta. */
378 if (inhibit_defer_pop == 0) 378 if (inhibit_defer_pop == 0)
379 { 379 {
380 stack_pointer_delta = old_stack_allocated; 380 stack_pointer_delta = old_stack_allocated;
381 pending_stack_adjust = 0; 381 pending_stack_adjust = 0;
382 } 382 }
383 } 383 }
384 384
385 /* If value type not void, return an rtx for the value. */ 385 /* If value type not void, return an rtx for the value. */
386 386
387 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode 387 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
388 || ignore) 388 || ignore)
389 target = const0_rtx; 389 target = const0_rtx;
390 390
391 if (targetm.calls.promote_function_return(funtype)) 391 tree type = TREE_TYPE (exp);
392 int unsignedp = TYPE_UNSIGNED (type);
393
394 /* if (targetm.calls.promote_function_return(funtype))*/
395 if (promote_function_mode(type, TYPE_MODE (type), &unsignedp,
396 fndecl ? TREE_TYPE (fndecl) : fntype, 0))
392 { 397 {
393 /* If we promoted this return value, make the proper SUBREG. 398 /* If we promoted this return value, make the proper SUBREG.
394 TARGET might be const0_rtx here, so be careful. */ 399 TARGET might be const0_rtx here, so be careful. */
395 if (REG_P (target) 400 if (REG_P (target)
396 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode 401 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
397 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) 402 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
398 { 403 {
399 tree type = TREE_TYPE (exp); 404 int offset = 0;
400 int unsignedp = TYPE_UNSIGNED (type); 405 enum machine_mode pmode;
401 int offset = 0; 406
402 enum machine_mode pmode; 407 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);//, 1);
403 408 /* If we don't promote as expected, something is wrong. */
404 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1); 409 gcc_assert (GET_MODE (target) == pmode);
405 /* If we don't promote as expected, something is wrong. */ 410
406 gcc_assert (GET_MODE (target) == pmode); 411 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
407 412 && (GET_MODE_SIZE (GET_MODE (target))
408 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN) 413 > GET_MODE_SIZE (TYPE_MODE (type))))
409 && (GET_MODE_SIZE (GET_MODE (target)) 414 {
410 > GET_MODE_SIZE (TYPE_MODE (type)))) 415 offset = GET_MODE_SIZE (GET_MODE (target))
411 { 416 - GET_MODE_SIZE (TYPE_MODE (type));
412 offset = GET_MODE_SIZE (GET_MODE (target)) 417 if (! BYTES_BIG_ENDIAN)
413 - GET_MODE_SIZE (TYPE_MODE (type)); 418 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
414 if (! BYTES_BIG_ENDIAN) 419 else if (! WORDS_BIG_ENDIAN)
415 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; 420 offset %= UNITS_PER_WORD;
416 else if (! WORDS_BIG_ENDIAN) 421 }
417 offset %= UNITS_PER_WORD; 422 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
418 } 423 SUBREG_PROMOTED_VAR_P (target) = 1;
419 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); 424 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
420 SUBREG_PROMOTED_VAR_P (target) = 1; 425 }
421 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
422 }
423 } 426 }
424 427
425 /* If size of args is variable or this was a constructor call for a stack 428 /* If size of args is variable or this was a constructor call for a stack
426 argument, restore saved stack-pointer value. */ 429 argument, restore saved stack-pointer value. */
427 430
477 } 480 }
478 481
479 482
480 static void 483 static void
481 preexpand_argument_expr (struct arg_data *arg, 484 preexpand_argument_expr (struct arg_data *arg,
482 int variable_size ATTRIBUTE_UNUSED) 485 int variable_size ATTRIBUTE_UNUSED)
483 { 486 {
484 tree pval = arg->tree_value; 487 tree pval = arg->tree_value;
485 rtx reg = 0; 488 rtx reg = 0;
486 int partial = 0; 489 int partial = 0;
487 490
519 522
520 if (arg->pass_on_stack) 523 if (arg->pass_on_stack)
521 stack_arg_under_construction++; 524 stack_arg_under_construction++;
522 525
523 arg->value = expand_expr (pval, 526 arg->value = expand_expr (pval,
524 (partial 527 (partial
525 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode) 528 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
526 ? NULL_RTX : arg->stack, 529 ? NULL_RTX : arg->stack,
527 VOIDmode, EXPAND_STACK_PARM); 530 VOIDmode, EXPAND_STACK_PARM);
528 531
529 /* If we are promoting object (or for any other reason) the mode 532 /* If we are promoting object (or for any other reason) the mode
530 doesn't agree, convert the mode. */ 533 doesn't agree, convert the mode. */
531 534
532 if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) 535 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
533 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)), 536 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
534 arg->value, arg->unsignedp); 537 arg->value, arg->unsignedp);
535 538
536 if (arg->pass_on_stack) 539 if (arg->pass_on_stack)
537 stack_arg_under_construction--; 540 stack_arg_under_construction--;
538 541
539 arg->exprs = get_insns (); 542 arg->exprs = get_insns ();
547 return ; 550 return ;
548 } 551 }
549 552
550 static int 553 static int
551 expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags, 554 expand_one_arg_push (struct arg_data *arg, rtx argblock, int flags,
552 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) 555 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
553 { 556 {
554 tree pval = arg->tree_value; 557 tree pval = arg->tree_value;
555 int used = 0; 558 int used = 0;
556 int i, lower_bound = 0, upper_bound = 0; 559 int i, lower_bound = 0, upper_bound = 0;
557 rtx reg = 0; 560 rtx reg = 0;
590 else if (arg->mode != BLKmode) 593 else if (arg->mode != BLKmode)
591 { 594 {
592 int size; 595 int size;
593 596
594 /* Argument is a scalar, not entirely passed in registers. 597 /* Argument is a scalar, not entirely passed in registers.
595 (If part is passed in registers, arg->partial says how much 598 (If part is passed in registers, arg->partial says how much
596 and emit_push_insn will take care of putting it there.) 599 and emit_push_insn will take care of putting it there.)
597 600
598 Push it, and if its size is less than the 601 Push it, and if its size is less than the
599 amount of space allocated to it, 602 amount of space allocated to it,
600 also bump stack pointer by the additional space. 603 also bump stack pointer by the additional space.
601 Note that in C the default argument promotions 604 Note that in C the default argument promotions
602 will prevent such mismatches. */ 605 will prevent such mismatches. */
603 606
604 size = GET_MODE_SIZE (arg->mode); 607 size = GET_MODE_SIZE (arg->mode);
605 /* Compute how much space the push instruction will push. 608 /* Compute how much space the push instruction will push.
606 On many machines, pushing a byte will advance the stack 609 On many machines, pushing a byte will advance the stack
607 pointer by a halfword. */ 610 pointer by a halfword. */
608 #ifdef PUSH_ROUNDING 611 #ifdef PUSH_ROUNDING
609 size = PUSH_ROUNDING (size); 612 size = PUSH_ROUNDING (size);
610 #endif 613 #endif
611 used = size; 614 used = size;
612 615
613 /* Compute how much space the argument should get: 616 /* Compute how much space the argument should get:
614 round up to a multiple of the alignment for arguments. */ 617 round up to a multiple of the alignment for arguments. */
615 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval))) 618 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
616 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1) 619 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
617 / (PARM_BOUNDARY / BITS_PER_UNIT)) 620 / (PARM_BOUNDARY / BITS_PER_UNIT))
618 * (PARM_BOUNDARY / BITS_PER_UNIT)); 621 * (PARM_BOUNDARY / BITS_PER_UNIT));
619 622
620 /* This isn't already where we want it on the stack, so put it there. 623 /* This isn't already where we want it on the stack, so put it there.
621 This can either be done with push or copy insns. */ 624 This can either be done with push or copy insns. */
622 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 625 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
623 PARM_BOUNDARY, partial, reg, used - size, argblock, 626 PARM_BOUNDARY, partial, reg, used - size, argblock,
624 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, 627 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
625 ARGS_SIZE_RTX (arg->locate.alignment_pad)); 628 ARGS_SIZE_RTX (arg->locate.alignment_pad));
626 629
627 /* Unless this is a partially-in-register argument, the argument is now 630 /* Unless this is a partially-in-register argument, the argument is now
628 in the stack. */ 631 in the stack. */
629 if (partial == 0) 632 if (partial == 0)
630 arg->value = arg->stack; 633 arg->value = arg->stack;
631 } 634 }
632 else 635 else
633 { 636 {
634 /* BLKmode, at least partly to be pushed. */ 637 /* BLKmode, at least partly to be pushed. */
635 638
636 unsigned int parm_align; 639 unsigned int parm_align;
637 int excess; 640 int excess;
638 rtx size_rtx; 641 rtx size_rtx;
639 642
640 /* Pushing a nonscalar. 643 /* Pushing a nonscalar.
641 If part is passed in registers, PARTIAL says how much 644 If part is passed in registers, PARTIAL says how much
642 and emit_push_insn will take care of putting it there. */ 645 and emit_push_insn will take care of putting it there. */
643 646
644 /* Round its size up to a multiple 647 /* Round its size up to a multiple
645 of the allocation unit for arguments. */ 648 of the allocation unit for arguments. */
646 649
647 if (arg->locate.size.var != 0) 650 if (arg->locate.size.var != 0)
648 { 651 {
649 excess = 0; 652 excess = 0;
650 size_rtx = ARGS_SIZE_RTX (arg->locate.size); 653 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
651 } 654 }
652 else 655 else
653 { 656 {
654 /* PUSH_ROUNDING has no effect on us, because emit_push_insn 657 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
655 for BLKmode is careful to avoid it. */ 658 for BLKmode is careful to avoid it. */
656 excess = (arg->locate.size.constant 659 excess = (arg->locate.size.constant
657 - int_size_in_bytes (TREE_TYPE (pval)) 660 - int_size_in_bytes (TREE_TYPE (pval))
658 + partial); 661 + partial);
659 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)), 662 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
660 NULL_RTX, TYPE_MODE (sizetype), 0); 663 NULL_RTX, TYPE_MODE (sizetype), 0);
661 } 664 }
662 665
663 parm_align = arg->locate.boundary; 666 parm_align = arg->locate.boundary;
664 667
665 /* When an argument is padded down, the block is aligned to 668 /* When an argument is padded down, the block is aligned to
666 PARM_BOUNDARY, but the actual argument isn't. */ 669 PARM_BOUNDARY, but the actual argument isn't. */
667 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward) 670 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
668 { 671 {
669 if (arg->locate.size.var) 672 if (arg->locate.size.var)
670 parm_align = BITS_PER_UNIT; 673 parm_align = BITS_PER_UNIT;
671 else if (excess) 674 else if (excess)
672 { 675 {
673 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT; 676 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
674 parm_align = MIN (parm_align, excess_align); 677 parm_align = MIN (parm_align, excess_align);
675 } 678 }
676 } 679 }
677 680
678 if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) 681 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
679 { 682 {
680 /* emit_push_insn might not work properly if arg->value and 683 /* emit_push_insn might not work properly if arg->value and
681 argblock + arg->locate.offset areas overlap. */ 684 argblock + arg->locate.offset areas overlap. */
682 rtx x = arg->value; 685 rtx x = arg->value;
683 int i = 0; 686 int i = 0;
684 687
685 if (XEXP (x, 0) == crtl->args.internal_arg_pointer 688 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
686 || (GET_CODE (XEXP (x, 0)) == PLUS 689 || (GET_CODE (XEXP (x, 0)) == PLUS
687 && XEXP (XEXP (x, 0), 0) == 690 && XEXP (XEXP (x, 0), 0) ==
688 crtl->args.internal_arg_pointer 691 crtl->args.internal_arg_pointer
689 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)) 692 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
690 { 693 {
691 if (XEXP (x, 0) != crtl->args.internal_arg_pointer) 694 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
692 i = INTVAL (XEXP (XEXP (x, 0), 1)); 695 i = INTVAL (XEXP (XEXP (x, 0), 1));
693 696
694 /* expand_call should ensure this. */ 697 /* expand_call should ensure this. */
695 gcc_assert (!arg->locate.offset.var 698 gcc_assert (!arg->locate.offset.var
696 && GET_CODE (size_rtx) == CONST_INT); 699 && GET_CODE (size_rtx) == CONST_INT);
697 } 700 }
698 } 701 }
699 702
700 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, 703 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
701 parm_align, partial, reg, excess, argblock, 704 parm_align, partial, reg, excess, argblock,
702 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space, 705 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
703 ARGS_SIZE_RTX (arg->locate.alignment_pad)); 706 ARGS_SIZE_RTX (arg->locate.alignment_pad));
704 707
705 /* Unless this is a partially-in-register argument, the argument is now 708 /* Unless this is a partially-in-register argument, the argument is now
706 in the stack. 709 in the stack.
707 710
708 ??? Unlike the case above, in which we want the actual 711 ??? Unlike the case above, in which we want the actual
709 address of the data, so that we can load it directly into a 712 address of the data, so that we can load it directly into a
710 register, here we want the address of the stack slot, so that 713 register, here we want the address of the stack slot, so that
711 it's properly aligned for word-by-word copying or something 714 it's properly aligned for word-by-word copying or something
712 like that. It's not clear that this is always correct. */ 715 like that. It's not clear that this is always correct. */
713 if (partial == 0) 716 if (partial == 0)
714 arg->value = arg->stack_slot; 717 arg->value = arg->stack_slot;
715 } 718 }
716 719
717 if (arg->reg && GET_CODE (arg->reg) == PARALLEL) 720 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
718 { 721 {
719 tree type = TREE_TYPE (arg->tree_value); 722 tree type = TREE_TYPE (arg->tree_value);
720 arg->parallel_value 723 arg->parallel_value
721 = emit_group_load_into_temps (arg->reg, arg->value, type, 724 = emit_group_load_into_temps (arg->reg, arg->value, type,
722 int_size_in_bytes (type)); 725 int_size_in_bytes (type));
723 } 726 }
724 727
725 /* Mark all slots this store used. */ 728 /* Mark all slots this store used. */
726 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL) 729 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
727 && argblock && ! variable_size && arg->stack) 730 && argblock && ! variable_size && arg->stack)
745 static void 748 static void
746 determine_order(int *order, int num_actuals) 749 determine_order(int *order, int num_actuals)
747 { 750 {
748 int i; 751 int i;
749 for (i=0; i<num_actuals; i++) 752 for (i=0; i<num_actuals; i++)
750 order[i] = num_actuals-i-1; 753 order[i] = num_actuals-i-1;
751 return; 754 return;
752 } 755 }
753 756
754 static void 757 static void
755 push_overlaps(struct arg_data *args, int num_actuals) 758 push_overlaps(struct arg_data *args, int num_actuals)
765 if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue; 768 if ( (src_offset=check_frame_offset(args[i].value)) < 0 ) continue;
766 769
767 /* Save the overlapping value to a temporary. */ 770 /* Save the overlapping value to a temporary. */
768 temp = assign_temp(args[i].tree_value, 1, 0, 0); 771 temp = assign_temp(args[i].tree_value, 1, 0, 0);
769 if ( args[i].mode==BLKmode ) 772 if ( args[i].mode==BLKmode )
770 emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 ); 773 emit_block_move ( temp, args[i].value, ARGS_SIZE_RTX(args[i].locate.size), 0 );
771 else 774 else
772 emit_move_insn ( temp, args[i].value ); 775 emit_move_insn ( temp, args[i].value );
773 args[i].value = temp; 776 args[i].value = temp;
774 777
775 } 778 }
776 return; 779 return;
777 } 780 }
780 check_frame_offset(rtx x) 783 check_frame_offset(rtx x)
781 { 784 {
782 int i; 785 int i;
783 rtx addr; 786 rtx addr;
784 if ( !x || !MEM_P(x)) 787 if ( !x || !MEM_P(x))
785 return -1; 788 return -1;
786 789
787 addr = XEXP(x, 0); 790 addr = XEXP(x, 0);
788 if (addr == crtl->args.internal_arg_pointer) 791 if (addr == crtl->args.internal_arg_pointer)
789 i = 0; 792 i = 0;
790 else if (GET_CODE (addr) == PLUS 793 else if (GET_CODE (addr) == PLUS
791 && XEXP (addr, 0) == crtl->args.internal_arg_pointer 794 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
792 && GET_CODE (XEXP (addr, 1)) == CONST_INT) 795 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
793 i = INTVAL (XEXP (addr, 1)); 796 i = INTVAL (XEXP (addr, 1));
794 else if (GET_CODE (addr) == PLUS 797 else if (GET_CODE (addr) == PLUS
795 && GET_CODE (XEXP (addr, 0)) == CONST_INT 798 && GET_CODE (XEXP (addr, 0)) == CONST_INT
796 && XEXP (addr, 1) == crtl->args.internal_arg_pointer ) 799 && XEXP (addr, 1) == crtl->args.internal_arg_pointer )
797 i = INTVAL (XEXP (addr, 0)); 800 i = INTVAL (XEXP (addr, 0));
798 else 801 else
799 return -1; 802 return -1;
800 803
801 return i; 804 return i;
802 } 805 }
803 806