/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
|
22 #ifndef GCC_EXPR_H
|
|
23 #define GCC_EXPR_H
|
|
24
|
|
25 /* For inhibit_defer_pop */
|
|
26 #include "function.h"
|
|
27 /* For XEXP, GEN_INT, rtx_code */
|
|
28 #include "rtl.h"
|
|
29 /* For optimize_size */
|
|
30 #include "flags.h"
|
|
31 /* For host_integerp, tree_low_cst, fold_convert, size_binop, ssize_int,
|
|
32 TREE_CODE, TYPE_SIZE, int_size_in_bytes, */
|
|
33 #include "tree.h"
|
|
34 /* For GET_MODE_BITSIZE, word_mode */
|
|
35 #include "machmode.h"
|
|
36
|
|
/* The default branch cost is 1; targets override this by defining
   BRANCH_COST themselves before this header is reached.  */
#ifndef BRANCH_COST
#define BRANCH_COST(speed_p, predictable_p) 1
#endif
|
|
41
|
|
/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
		      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
		      EXPAND_MEMORY};
|
|
55
|
|
/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
|
|
63
|
|
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movmem or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
#define MOVE_RATIO(speed) 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO(speed) ((speed) ? 15 : 3)
#endif
#endif
|
|
75
|
|
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a setmem or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_setmemqi) || defined (HAVE_setmemhi) || defined (HAVE_setmemsi) || defined (HAVE_setmemdi) || defined (HAVE_setmemti)
#define CLEAR_RATIO(speed) 2
#else
/* If we are optimizing for space, cut down the default clear ratio.
   (Same values as the MOVE_RATIO default above, for consistency.)  */
#define CLEAR_RATIO(speed) ((speed) ? 15 : 3)
#endif
#endif
|
|
87
|
|
/* If a memory set (to value other than zero) operation would take
   SET_RATIO or more simple move-instruction sequences, we will do a movmem
   or libcall instead.  The default simply mirrors MOVE_RATIO.  */
#ifndef SET_RATIO
#define SET_RATIO(speed) MOVE_RATIO(speed)
#endif
|
|
94
|
|
/* Padding direction for arguments; used by FUNCTION_ARG_PADDING below
   and by locate_and_pad_parm (see locate_and_pad_arg_data.where_pad).  */
enum direction {none, upward, downward};
|
|
96
|
|
97 /* Structure to record the size of a sequence of arguments
|
|
98 as the sum of a tree-expression and a constant. This structure is
|
|
99 also used to store offsets from the stack, which might be negative,
|
|
100 so the variable part must be ssizetype, not sizetype. */
|
|
101
|
|
102 struct args_size
|
|
103 {
|
|
104 HOST_WIDE_INT constant;
|
|
105 tree var;
|
|
106 };
|
|
107
|
|
108 /* Package up various arg related fields of struct args for
|
|
109 locate_and_pad_parm. */
|
|
110 struct locate_and_pad_arg_data
|
|
111 {
|
|
112 /* Size of this argument on the stack, rounded up for any padding it
|
|
113 gets. If REG_PARM_STACK_SPACE is defined, then register parms are
|
|
114 counted here, otherwise they aren't. */
|
|
115 struct args_size size;
|
|
116 /* Offset of this argument from beginning of stack-args. */
|
|
117 struct args_size offset;
|
|
118 /* Offset to the start of the stack slot. Different from OFFSET
|
|
119 if this arg pads downward. */
|
|
120 struct args_size slot_offset;
|
|
121 /* The amount that the stack pointer needs to be adjusted to
|
|
122 force alignment for the next argument. */
|
|
123 struct args_size alignment_pad;
|
|
124 /* Which way we should pad this arg. */
|
|
125 enum direction where_pad;
|
|
126 /* slot_offset is at least this aligned. */
|
|
127 unsigned int boundary;
|
|
128 };
|
|
129
|
|
/* Add the value of the tree INC to the `struct args_size' TO.
   A host-representable constant INC is folded into TO.constant;
   anything else is accumulated into the ssizetype tree TO.var.  */

#define ADD_PARM_SIZE(TO, INC)                                  \
do {                                                            \
  tree inc = (INC);                                             \
  if (host_integerp (inc, 0))                                   \
    (TO).constant += tree_low_cst (inc, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = fold_convert (ssizetype, inc);                   \
  else                                                          \
    (TO).var = size_binop (PLUS_EXPR, (TO).var,                 \
			   fold_convert (ssizetype, inc));      \
} while (0)
|
|
143
|
|
/* Subtract the value of the tree DEC from the `struct args_size' TO;
   the mirror image of ADD_PARM_SIZE above.  */
#define SUB_PARM_SIZE(TO, DEC)                                  \
do {                                                            \
  tree dec = (DEC);                                             \
  if (host_integerp (dec, 0))                                   \
    (TO).constant -= tree_low_cst (dec, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = size_binop (MINUS_EXPR, ssize_int (0),           \
			   fold_convert (ssizetype, dec));      \
  else                                                          \
    (TO).var = size_binop (MINUS_EXPR, (TO).var,                \
			   fold_convert (ssizetype, dec));      \
} while (0)
|
|
156
|
|
/* Convert the implicit sum in a `struct args_size' into a tree
   of type ssizetype.  */
#define ARGS_SIZE_TREE(SIZE)                                    \
((SIZE).var == 0 ? ssize_int ((SIZE).constant)                  \
 : size_binop (PLUS_EXPR, fold_convert (ssizetype, (SIZE).var), \
	       ssize_int ((SIZE).constant)))
|
|
163
|
|
/* Convert the implicit sum in a `struct args_size' into an rtx.
   A purely constant size becomes a CONST_INT; otherwise the tree
   sum is expanded via expand_normal.  */
#define ARGS_SIZE_RTX(SIZE)                                     \
((SIZE).var == 0 ? GEN_INT ((SIZE).constant)                    \
 : expand_normal (ARGS_SIZE_TREE (SIZE)))
|
|
168
|
|
/* Supply a default definition for FUNCTION_ARG_PADDING:
   usually pad upward, but pad short args downward on
   big-endian machines.  */

#define DEFAULT_FUNCTION_ARG_PADDING(MODE, TYPE)                        \
  (! BYTES_BIG_ENDIAN                                                   \
   ? upward                                                             \
   : (((MODE) == BLKmode                                                \
       ? ((TYPE) && TREE_CODE (TYPE_SIZE (TYPE)) == INTEGER_CST         \
	  && int_size_in_bytes (TYPE) < (PARM_BOUNDARY / BITS_PER_UNIT)) \
       : GET_MODE_BITSIZE (MODE) < PARM_BOUNDARY)                       \
      ? downward : upward))

#ifndef FUNCTION_ARG_PADDING
#define FUNCTION_ARG_PADDING(MODE, TYPE)        \
  DEFAULT_FUNCTION_ARG_PADDING ((MODE), (TYPE))
#endif
|
|
186
|
|
/* Supply a default definition for FUNCTION_ARG_BOUNDARY.  Normally, we let
   FUNCTION_ARG_PADDING, which also pads the length, handle any needed
   alignment.  */

#ifndef FUNCTION_ARG_BOUNDARY
#define FUNCTION_ARG_BOUNDARY(MODE, TYPE) PARM_BOUNDARY
#endif
|
|
194
|
|
/* Supply a default definition of STACK_SAVEAREA_MODE for emit_stack_save.
   Normally move_insn, so Pmode stack pointer.  */

#ifndef STACK_SAVEAREA_MODE
#define STACK_SAVEAREA_MODE(LEVEL) Pmode
#endif
|
|
201
|
|
/* Supply a default definition of STACK_SIZE_MODE for
   allocate_dynamic_stack_space.  Normally PLUS/MINUS, so word_mode.  */

#ifndef STACK_SIZE_MODE
#define STACK_SIZE_MODE word_mode
#endif
|
|
208
|
|
/* Provide default values for the macros controlling stack checking.  */

/* The default is neither full builtin stack checking...  */
#ifndef STACK_CHECK_BUILTIN
#define STACK_CHECK_BUILTIN 0
#endif

/* ...nor static builtin stack checking.  */
#ifndef STACK_CHECK_STATIC_BUILTIN
#define STACK_CHECK_STATIC_BUILTIN 0
#endif

/* The default interval is one page (4096 bytes).  */
#ifndef STACK_CHECK_PROBE_INTERVAL
#define STACK_CHECK_PROBE_INTERVAL 4096
#endif

/* The default is to do a store into the stack.  */
#ifndef STACK_CHECK_PROBE_LOAD
#define STACK_CHECK_PROBE_LOAD 0
#endif
|
|
230
|
|
/* This is a kludge to try to capture the discrepancy between the old
   mechanism (generic stack checking) and the new mechanism (static
   builtin stack checking).  STACK_CHECK_PROTECT needs to be bumped
   for the latter because part of the protection area is effectively
   included in STACK_CHECK_MAX_FRAME_SIZE for the former.  */
#ifdef STACK_CHECK_PROTECT
#define STACK_OLD_CHECK_PROTECT STACK_CHECK_PROTECT
#else
#define STACK_OLD_CHECK_PROTECT \
 (USING_SJLJ_EXCEPTIONS ? 75 * UNITS_PER_WORD : 8 * 1024)
#endif

/* Minimum amount of stack required to recover from an anticipated stack
   overflow detection.  The default value conveys an estimate of the amount
   of stack required to propagate an exception.  */
#ifndef STACK_CHECK_PROTECT
#define STACK_CHECK_PROTECT \
 (USING_SJLJ_EXCEPTIONS ? 75 * UNITS_PER_WORD : 12 * 1024)
#endif

/* Make the maximum frame size be the largest we can and still only need
   one probe per function.  */
#ifndef STACK_CHECK_MAX_FRAME_SIZE
#define STACK_CHECK_MAX_FRAME_SIZE \
  (STACK_CHECK_PROBE_INTERVAL - UNITS_PER_WORD)
#endif

/* This is arbitrary, but should be large enough everywhere.  */
#ifndef STACK_CHECK_FIXED_FRAME_SIZE
#define STACK_CHECK_FIXED_FRAME_SIZE (4 * UNITS_PER_WORD)
#endif

/* Provide a reasonable default for the maximum size of an object to
   allocate in the fixed frame.  We may need to be able to make this
   controllable by the user at some point.  */
#ifndef STACK_CHECK_MAX_VAR_SIZE
#define STACK_CHECK_MAX_VAR_SIZE (STACK_CHECK_MAX_FRAME_SIZE / 100)
#endif
|
|
269
|
|
270 /* Functions from optabs.c, commonly used, and without need for the optabs
|
|
271 tables: */
|
|
272
|
|
/* Passed to expand_simple_binop and expand_binop to say which options
   to try to use if the requested operation can't be open-coded on the
   requisite mode.  Either OPTAB_LIB or OPTAB_LIB_WIDEN says try using
   a library call.  Either OPTAB_WIDEN or OPTAB_LIB_WIDEN says try
   using a wider mode.  OPTAB_MUST_WIDEN says try widening and don't
   try anything else.  */

enum optab_methods
{
  OPTAB_DIRECT,
  OPTAB_LIB,
  OPTAB_WIDEN,
  OPTAB_LIB_WIDEN,
  OPTAB_MUST_WIDEN
};
|
|
288
|
|
289 /* Generate code for a simple binary or unary operation. "Simple" in
|
|
290 this case means "can be unambiguously described by a (mode, code)
|
|
291 pair and mapped to a single optab." */
|
|
292 extern rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx,
|
|
293 rtx, rtx, int, enum optab_methods);
|
|
294 extern rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx,
|
|
295 int);
|
|
296
|
|
297 /* Report whether the machine description contains an insn which can
|
|
298 perform the operation described by CODE and MODE. */
|
|
299 extern int have_insn_for (enum rtx_code, enum machine_mode);
|
|
300
|
|
301 /* Emit code to make a call to a constant function or a library call. */
|
|
302 extern void emit_libcall_block (rtx, rtx, rtx, rtx);
|
|
303
|
|
304 /* Create but don't emit one rtl instruction to perform certain operations.
|
|
305 Modes must match; operands must meet the operation's predicates.
|
|
306 Likewise for subtraction and for just copying. */
|
|
307 extern rtx gen_add2_insn (rtx, rtx);
|
|
308 extern rtx gen_add3_insn (rtx, rtx, rtx);
|
|
309 extern rtx gen_sub2_insn (rtx, rtx);
|
|
310 extern rtx gen_sub3_insn (rtx, rtx, rtx);
|
|
311 extern rtx gen_move_insn (rtx, rtx);
|
|
312 extern int have_add2_insn (rtx, rtx);
|
|
313 extern int have_sub2_insn (rtx, rtx);
|
|
314
|
|
315 /* Emit a pair of rtl insns to compare two rtx's and to jump
|
|
316 to a label if the comparison is true. */
|
|
317 extern void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx,
|
|
318 enum machine_mode, int, rtx);
|
|
319
|
|
320 /* Generate code to indirectly jump to a location given in the rtx LOC. */
|
|
321 extern void emit_indirect_jump (rtx);
|
|
322
|
|
323 /* Generate a conditional trap instruction. */
|
|
324 extern rtx gen_cond_trap (enum rtx_code, rtx, rtx, rtx);
|
|
325
|
|
326 #include "insn-config.h"
|
|
327
|
|
328 #ifdef HAVE_conditional_move
|
|
329 /* Emit a conditional move operation. */
|
|
330 rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
|
331 rtx, rtx, enum machine_mode, int);
|
|
332
|
|
333 /* Return nonzero if the conditional move is supported. */
|
|
334 int can_conditionally_move_p (enum machine_mode mode);
|
|
335
|
|
336 #endif
|
|
337 rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
|
338 rtx, rtx, enum machine_mode, int);
|
|
339
|
|
340 rtx expand_val_compare_and_swap (rtx, rtx, rtx, rtx);
|
|
341 rtx expand_bool_compare_and_swap (rtx, rtx, rtx, rtx);
|
|
342 rtx expand_sync_operation (rtx, rtx, enum rtx_code);
|
|
343 rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
|
|
344 rtx expand_sync_lock_test_and_set (rtx, rtx, rtx);
|
|
345
|
|
346 /* Functions from expmed.c: */
|
|
347
|
|
348 /* Arguments MODE, RTX: return an rtx for the negation of that value.
|
|
349 May emit insns. */
|
|
350 extern rtx negate_rtx (enum machine_mode, rtx);
|
|
351
|
|
352 /* Expand a logical AND operation. */
|
|
353 extern rtx expand_and (enum machine_mode, rtx, rtx, rtx);
|
|
354
|
|
355 /* Emit a store-flag operation. */
|
|
356 extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
|
|
357 int, int);
|
|
358
|
|
359 /* Like emit_store_flag, but always succeeds. */
|
|
360 extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
|
|
361 enum machine_mode, int, int);
|
|
362
|
|
363 /* Functions from builtins.c: */
|
|
364 extern rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int);
|
|
365 extern tree std_build_builtin_va_list (void);
|
|
366 extern tree std_fn_abi_va_list (tree);
|
|
367 extern tree std_canonical_va_list_type (tree);
|
|
368
|
|
369 extern void std_expand_builtin_va_start (tree, rtx);
|
|
370 extern rtx default_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
|
|
371 extern void expand_builtin_setjmp_setup (rtx, rtx);
|
|
372 extern void expand_builtin_setjmp_receiver (rtx);
|
|
373 extern rtx expand_builtin_saveregs (void);
|
|
374 extern void expand_builtin_trap (void);
|
|
375 extern rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
|
|
376
|
|
377 /* Functions from expr.c: */
|
|
378
|
|
379 /* This is run during target initialization to set up which modes can be
|
|
380 used directly in memory and to initialize the block move optab. */
|
|
381 extern void init_expr_target (void);
|
|
382
|
|
383 /* This is run at the start of compiling a function. */
|
|
384 extern void init_expr (void);
|
|
385
|
|
386 /* Emit some rtl insns to move data between rtx's, converting machine modes.
|
|
387 Both modes must be floating or both fixed. */
|
|
388 extern void convert_move (rtx, rtx, int);
|
|
389
|
|
390 /* Convert an rtx to specified machine mode and return the result. */
|
|
391 extern rtx convert_to_mode (enum machine_mode, rtx, int);
|
|
392
|
|
393 /* Convert an rtx to MODE from OLDMODE and return the result. */
|
|
394 extern rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int);
|
|
395
|
|
/* Emit code to move a block Y to a block X.  */

enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL
};
|
|
406
|
|
407 extern GTY(()) tree block_clear_fn;
|
|
408 extern void init_block_move_fn (const char *);
|
|
409 extern void init_block_clear_fn (const char *);
|
|
410
|
|
411 extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
|
|
412 extern rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
|
|
413 extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
|
|
414 unsigned int, HOST_WIDE_INT);
|
|
415
|
|
416 /* Copy all or part of a value X into registers starting at REGNO.
|
|
417 The number of registers to be filled is NREGS. */
|
|
418 extern void move_block_to_reg (int, rtx, int, enum machine_mode);
|
|
419
|
|
420 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
|
|
421 The number of registers to be filled is NREGS. */
|
|
422 extern void move_block_from_reg (int, rtx, int);
|
|
423
|
|
424 /* Generate a non-consecutive group of registers represented by a PARALLEL. */
|
|
425 extern rtx gen_group_rtx (rtx);
|
|
426
|
|
427 /* Load a BLKmode value into non-consecutive registers represented by a
|
|
428 PARALLEL. */
|
|
429 extern void emit_group_load (rtx, rtx, tree, int);
|
|
430
|
|
431 /* Similarly, but load into new temporaries. */
|
|
432 extern rtx emit_group_load_into_temps (rtx, rtx, tree, int);
|
|
433
|
|
434 /* Move a non-consecutive group of registers represented by a PARALLEL into
|
|
435 a non-consecutive group of registers represented by a PARALLEL. */
|
|
436 extern void emit_group_move (rtx, rtx);
|
|
437
|
|
438 /* Move a group of registers represented by a PARALLEL into pseudos. */
|
|
439 extern rtx emit_group_move_into_temps (rtx);
|
|
440
|
|
441 /* Store a BLKmode value from non-consecutive registers represented by a
|
|
442 PARALLEL. */
|
|
443 extern void emit_group_store (rtx, rtx, tree, int);
|
|
444
|
|
445 /* Copy BLKmode object from a set of registers. */
|
|
446 extern rtx copy_blkmode_from_reg (rtx, rtx, tree);
|
|
447
|
|
448 /* Mark REG as holding a parameter for the next CALL_INSN. */
|
|
449 extern void use_reg (rtx *, rtx);
|
|
450
|
|
451 /* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
|
|
452 for the next CALL_INSN. */
|
|
453 extern void use_regs (rtx *, int, int);
|
|
454
|
|
455 /* Mark a PARALLEL as holding a parameter for the next CALL_INSN. */
|
|
456 extern void use_group_regs (rtx *, rtx);
|
|
457
|
|
458 /* Write zeros through the storage of OBJECT.
|
|
459 If OBJECT has BLKmode, SIZE is its length in bytes. */
|
|
460 extern rtx clear_storage (rtx, rtx, enum block_op_methods);
|
|
461 extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
|
|
462 unsigned int, HOST_WIDE_INT);
|
|
463 /* The same, but always output an library call. */
|
|
464 rtx set_storage_via_libcall (rtx, rtx, rtx, bool);
|
|
465
|
|
466 /* Expand a setmem pattern; return true if successful. */
|
|
467 extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
|
|
468 unsigned int, HOST_WIDE_INT);
|
|
469
|
|
470 /* Determine whether the LEN bytes can be moved by using several move
|
|
471 instructions. Return nonzero if a call to move_by_pieces should
|
|
472 succeed. */
|
|
473 extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);
|
|
474
|
|
475 /* Return nonzero if it is desirable to store LEN bytes generated by
|
|
476 CONSTFUN with several move instructions by store_by_pieces
|
|
477 function. CONSTFUNDATA is a pointer which will be passed as argument
|
|
478 in every CONSTFUN call.
|
|
479 ALIGN is maximum alignment we can assume.
|
|
480 MEMSETP is true if this is a real memset/bzero, not a copy
|
|
481 of a const string. */
|
|
482 extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
|
|
483 rtx (*) (void *, HOST_WIDE_INT,
|
|
484 enum machine_mode),
|
|
485 void *, unsigned int, bool);
|
|
486
|
|
487 /* Generate several move instructions to store LEN bytes generated by
|
|
488 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
|
|
489 pointer which will be passed as argument in every CONSTFUN call.
|
|
490 ALIGN is maximum alignment we can assume.
|
|
491 MEMSETP is true if this is a real memset/bzero, not a copy.
|
|
492 Returns TO + LEN. */
|
|
493 extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
|
|
494 rtx (*) (void *, HOST_WIDE_INT, enum machine_mode),
|
|
495 void *, unsigned int, bool, int);
|
|
496
|
|
497 /* Emit insns to set X from Y. */
|
|
498 extern rtx emit_move_insn (rtx, rtx);
|
|
499
|
|
500 /* Emit insns to set X from Y, with no frills. */
|
|
501 extern rtx emit_move_insn_1 (rtx, rtx);
|
|
502
|
|
503 extern rtx emit_move_complex_push (enum machine_mode, rtx, rtx);
|
|
504 extern rtx emit_move_complex_parts (rtx, rtx);
|
|
505
|
|
506 /* Push a block of length SIZE (perhaps variable)
|
|
507 and return an rtx to address the beginning of the block. */
|
|
508 extern rtx push_block (rtx, int, int);
|
|
509
|
|
510 /* Generate code to push something onto the stack, given its mode and type. */
|
|
511 extern void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int,
|
|
512 int, rtx, int, rtx, rtx, int, rtx);
|
|
513
|
|
514 /* Expand an assignment that stores the value of FROM into TO. */
|
|
515 extern void expand_assignment (tree, tree, bool);
|
|
516
|
|
517 /* Generate code for computing expression EXP,
|
|
518 and storing the value into TARGET.
|
|
519 If SUGGEST_REG is nonzero, copy the value through a register
|
|
520 and return that register, if that is possible. */
|
|
521 extern rtx store_expr (tree, rtx, int, bool);
|
|
522
|
|
523 /* Given an rtx that may include add and multiply operations,
|
|
524 generate them as insns and return a pseudo-reg containing the value.
|
|
525 Useful after calling expand_expr with 1 as sum_ok. */
|
|
526 extern rtx force_operand (rtx, rtx);
|
|
527
|
|
528 /* Work horse for expand_expr. */
|
|
529 extern rtx expand_expr_real (tree, rtx, enum machine_mode,
|
|
530 enum expand_modifier, rtx *);
|
|
531
|
|
532 /* Generate code for computing expression EXP.
|
|
533 An rtx for the computed value is returned. The value is never null.
|
|
534 In the case of a void EXP, const0_rtx is returned. */
|
|
535 static inline rtx
|
|
536 expand_expr (tree exp, rtx target, enum machine_mode mode,
|
|
537 enum expand_modifier modifier)
|
|
538 {
|
|
539 return expand_expr_real (exp, target, mode, modifier, NULL);
|
|
540 }
|
|
541
|
|
542 static inline rtx
|
|
543 expand_normal (tree exp)
|
|
544 {
|
|
545 return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL);
|
|
546 }
|
|
547
|
|
548 /* At the start of a function, record that we have no previously-pushed
|
|
549 arguments waiting to be popped. */
|
|
550 extern void init_pending_stack_adjust (void);
|
|
551
|
|
552 /* Discard any pending stack adjustment. */
|
|
553 extern void discard_pending_stack_adjust (void);
|
|
554
|
|
555 /* When exiting from function, if safe, clear out any pending stack adjust
|
|
556 so the adjustment won't get done. */
|
|
557 extern void clear_pending_stack_adjust (void);
|
|
558
|
|
559 /* Pop any previously-pushed arguments that have not been popped yet. */
|
|
560 extern void do_pending_stack_adjust (void);
|
|
561
|
|
562 /* Return the tree node and offset if a given argument corresponds to
|
|
563 a string constant. */
|
|
564 extern tree string_constant (tree, tree *);
|
|
565
|
|
566 /* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
|
|
567 extern void jumpifnot (tree, rtx);
|
|
568
|
|
569 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
|
|
570 extern void jumpif (tree, rtx);
|
|
571
|
|
572 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
|
|
573 the result is zero, or IF_TRUE_LABEL if the result is one. */
|
|
574 extern void do_jump (tree, rtx, rtx);
|
|
575
|
|
576 /* Generate rtl to compare two rtx's, will call emit_cmp_insn. */
|
|
577 extern rtx compare_from_rtx (rtx, rtx, enum rtx_code, int, enum machine_mode,
|
|
578 rtx);
|
|
579 extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
|
|
580 enum machine_mode, rtx, rtx, rtx);
|
|
581
|
|
582 /* Two different ways of generating switch statements. */
|
|
583 extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx);
|
|
584 extern int try_tablejump (tree, tree, tree, tree, rtx, rtx);
|
|
585
|
|
586 /* Smallest number of adjacent cases before we use a jump table.
|
|
587 XXX Should be a target hook. */
|
|
588 extern unsigned int case_values_threshold (void);
|
|
589
|
|
590 /* Functions from alias.c */
|
|
591 #include "alias.h"
|
|
592
|
|
593
|
|
594 /* rtl.h and tree.h were included. */
|
|
595 /* Return an rtx for the size in bytes of the value of an expr. */
|
|
596 extern rtx expr_size (tree);
|
|
597
|
|
598 /* Return a wide integer for the size in bytes of the value of EXP, or -1
|
|
599 if the size can vary or is larger than an integer. */
|
|
600 extern HOST_WIDE_INT int_expr_size (tree);
|
|
601
|
|
602 /* Return an rtx that refers to the value returned by a function
|
|
603 in its original home. This becomes invalid if any more code is emitted. */
|
|
604 extern rtx hard_function_value (const_tree, const_tree, const_tree, int);
|
|
605
|
|
606 extern rtx prepare_call_address (rtx, rtx, rtx *, int, int);
|
|
607
|
|
608 extern bool shift_return_value (enum machine_mode, bool, rtx);
|
|
609
|
|
610 extern rtx expand_call (tree, rtx, int);
|
|
611
|
|
612 extern void fixup_tail_calls (void);
|
|
613
|
|
614 #ifdef TREE_CODE
|
|
615 extern rtx expand_shift (enum tree_code, enum machine_mode, rtx, tree, rtx,
|
|
616 int);
|
|
617 extern rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx,
|
|
618 rtx, int);
|
|
619 #endif
|
|
620
|
|
621 extern void locate_and_pad_parm (enum machine_mode, tree, int, int, tree,
|
|
622 struct args_size *,
|
|
623 struct locate_and_pad_arg_data *);
|
|
624
|
|
625 /* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary. */
|
|
626 extern rtx label_rtx (tree);
|
|
627
|
|
628 /* As label_rtx, but additionally the label is placed on the forced label
|
|
629 list of its containing function (i.e. it is treated as reachable even
|
|
630 if how is not obvious). */
|
|
631 extern rtx force_label_rtx (tree);
|
|
632
|
|
633 /* Return an rtx like arg but sans any constant terms.
|
|
634 Returns the original rtx if it has no constant terms.
|
|
635 The constant terms are added and stored via a second arg. */
|
|
636 extern rtx eliminate_constant_term (rtx, rtx *);
|
|
637
|
|
638 /* Convert arg to a valid memory address for specified machine mode,
|
|
639 by emitting insns to perform arithmetic if nec. */
|
|
640 extern rtx memory_address (enum machine_mode, rtx);
|
|
641
|
|
642 /* Return a memory reference like MEMREF, but with its mode changed
|
|
643 to MODE and its address changed to ADDR.
|
|
644 (VOIDmode means don't change the mode.
|
|
645 NULL for ADDR means don't change the address.) */
|
|
646 extern rtx change_address (rtx, enum machine_mode, rtx);
|
|
647
|
|
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)
|
|
666
|
|
667 extern rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int);
|
|
668 extern rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx,
|
|
669 HOST_WIDE_INT, int);
|
|
670
|
|
671 /* Return a memory reference like MEMREF, but whose address is changed by
|
|
672 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
|
|
673 known to be in OFFSET (possibly 1). */
|
|
674 extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);
|
|
675
|
|
676 /* Definitions from emit-rtl.c */
|
|
677 #include "emit-rtl.h"
|
|
678
|
|
679 /* Return a memory reference like MEMREF, but with its mode widened to
|
|
680 MODE and adjusted by OFFSET. */
|
|
681 extern rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT);
|
|
682
|
|
683 /* Return a memory reference like MEMREF, but which is known to have a
|
|
684 valid address. */
|
|
685 extern rtx validize_mem (rtx);
|
|
686
|
|
687 extern rtx use_anchored_address (rtx);
|
|
688
|
|
689 /* Given REF, a MEM, and T, either the type of X or the expression
|
|
690 corresponding to REF, set the memory attributes. OBJECTP is nonzero
|
|
691 if we are making a new object of this type. */
|
|
692 extern void set_mem_attributes (rtx, tree, int);
|
|
693
|
|
694 /* Similar, except that BITPOS has not yet been applied to REF, so if
|
|
695 we alter MEM_OFFSET according to T then we should subtract BITPOS
|
|
696 expecting that it'll be added back in later. */
|
|
697 extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT);
|
|
698
|
|
699 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
|
|
700 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
|
|
701 -1 if not known. */
|
|
702 extern int get_mem_align_offset (rtx, int);
|
|
703
|
|
704 /* Assemble the static constant template for function entry trampolines. */
|
|
705 extern rtx assemble_trampoline_template (void);
|
|
706
|
|
707 /* Copy given rtx to a new temp reg and return that. */
|
|
708 extern rtx copy_to_reg (rtx);
|
|
709
|
|
710 /* Like copy_to_reg but always make the reg Pmode. */
|
|
711 extern rtx copy_addr_to_reg (rtx);
|
|
712
|
|
713 /* Like copy_to_reg but always make the reg the specified mode MODE. */
|
|
714 extern rtx copy_to_mode_reg (enum machine_mode, rtx);
|
|
715
|
|
716 /* Copy given rtx to given temp reg and return that. */
|
|
717 extern rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode);
|
|
718
|
|
719 /* Copy a value to a register if it isn't already a register.
|
|
720 Args are mode (in case value is a constant) and the value. */
|
|
721 extern rtx force_reg (enum machine_mode, rtx);
|
|
722
|
|
723 /* Return given rtx, copied into a new temp reg if it was in memory. */
|
|
724 extern rtx force_not_mem (rtx);
|
|
725
|
|
726 /* Return mode and signedness to use when object is promoted. */
|
|
727 extern enum machine_mode promote_mode (const_tree, enum machine_mode, int *, int);
|
|
728
|
|
729 /* Remove some bytes from the stack. An rtx says how many. */
|
|
730 extern void adjust_stack (rtx);
|
|
731
|
|
732 /* Add some bytes to the stack. An rtx says how many. */
|
|
733 extern void anti_adjust_stack (rtx);
|
|
734
|
|
/* This enum is used for the following two functions
   (emit_stack_save and emit_stack_restore).  */
enum save_level {SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL};
|
|
737
|
|
738 /* Save the stack pointer at the specified level. */
|
|
739 extern void emit_stack_save (enum save_level, rtx *, rtx);
|
|
740
|
|
741 /* Restore the stack pointer from a save area of the specified level. */
|
|
742 extern void emit_stack_restore (enum save_level, rtx, rtx);
|
|
743
|
|
744 /* Invoke emit_stack_save for the nonlocal_goto_save_area. */
|
|
745 extern void update_nonlocal_goto_save_area (void);
|
|
746
|
|
747 /* Allocate some space on the stack dynamically and return its address. An rtx
|
|
748 says how many bytes. */
|
|
749 extern rtx allocate_dynamic_stack_space (rtx, rtx, int);
|
|
750
|
|
751 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
|
|
752 FIRST is a constant and size is a Pmode RTX. These are offsets from the
|
|
753 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
|
|
754 subtract from the stack. If SIZE is constant, this is done
|
|
755 with a fixed number of probes. Otherwise, we must make a loop. */
|
|
756 extern void probe_stack_range (HOST_WIDE_INT, rtx);
|
|
757
|
|
758 /* Return an rtx that refers to the value returned by a library call
|
|
759 in its original home. This becomes invalid if any more code is emitted. */
|
|
760 extern rtx hard_libcall_value (enum machine_mode);
|
|
761
|
|
/* Return the mode desired by operand N of a particular bitfield
   insert/extract insn, or MAX_MACHINE_MODE if no such insn is
   available.  */

enum extraction_pattern { EP_insv, EP_extv, EP_extzv };
|
|
767 extern enum machine_mode
|
|
768 mode_for_extraction (enum extraction_pattern, int);
|
|
769
|
|
770 extern void store_bit_field (rtx, unsigned HOST_WIDE_INT,
|
|
771 unsigned HOST_WIDE_INT, enum machine_mode, rtx);
|
|
772 extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
|
|
773 unsigned HOST_WIDE_INT, int, rtx,
|
|
774 enum machine_mode, enum machine_mode);
|
|
775 extern rtx extract_low_bits (enum machine_mode, enum machine_mode, rtx);
|
|
776 extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
|
|
777 extern rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int);
|
|
778
|
|
779 extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
|
|
780 extern int safe_from_p (const_rtx, tree, int);
|
|
781
|
|
782 /* Call this once to initialize the contents of the optabs
|
|
783 appropriately for the current target machine. */
|
|
784 extern void init_optabs (void);
|
|
785 extern void init_all_optabs (void);
|
|
786
|
|
787 /* Call this to initialize an optab function entry. */
|
|
788 extern rtx init_one_libfunc (const char *);
|
|
789 extern rtx set_user_assembler_libfunc (const char *, const char *);
|
|
790
|
|
791 extern int vector_mode_valid_p (enum machine_mode);
|
|
792
|
|
793 #endif /* GCC_EXPR_H */
|