0
|
1 /* Subroutines used by or related to instruction recognition.
|
|
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
|
|
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
|
|
4 Free Software Foundation, Inc.
|
|
5
|
|
6 This file is part of GCC.
|
|
7
|
|
8 GCC is free software; you can redistribute it and/or modify it under
|
|
9 the terms of the GNU General Public License as published by the Free
|
|
10 Software Foundation; either version 3, or (at your option) any later
|
|
11 version.
|
|
12
|
|
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
16 for more details.
|
|
17
|
|
18 You should have received a copy of the GNU General Public License
|
|
19 along with GCC; see the file COPYING3. If not see
|
|
20 <http://www.gnu.org/licenses/>. */
|
|
21
|
|
22
|
|
23 #include "config.h"
|
|
24 #include "system.h"
|
|
25 #include "coretypes.h"
|
|
26 #include "tm.h"
|
|
27 #include "rtl.h"
|
|
28 #include "tm_p.h"
|
|
29 #include "insn-config.h"
|
|
30 #include "insn-attr.h"
|
|
31 #include "hard-reg-set.h"
|
|
32 #include "recog.h"
|
|
33 #include "regs.h"
|
|
34 #include "addresses.h"
|
|
35 #include "expr.h"
|
|
36 #include "function.h"
|
|
37 #include "flags.h"
|
|
38 #include "real.h"
|
|
39 #include "toplev.h"
|
|
40 #include "basic-block.h"
|
|
41 #include "output.h"
|
|
42 #include "reload.h"
|
|
43 #include "target.h"
|
|
44 #include "timevar.h"
|
|
45 #include "tree-pass.h"
|
|
46 #include "df.h"
|
|
47
|
|
/* Default the addressing codes used for stack pushes and pops from the
   direction of stack growth, unless the target overrides them.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
|
|
63
|
|
#ifndef HAVE_ATTR_enabled
/* Fallback when the machine description does not define the "enabled"
   insn attribute: treat every alternative of every insn as enabled.  */
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
|
|
71
|
|
72 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
|
|
73 static void validate_replace_src_1 (rtx *, void *);
|
|
74 static rtx split_insn (rtx);
|
|
75
|
|
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

/* Data about the insn currently being recognized; filled in by
   extract_insn.  See recog.h for the layout.  */
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
|
|
105
|
|
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  /* Disallow volatile memory operands during recognition; appropriate
     while generating new RTL (see the comment above volatile_ok).  */
  volatile_ok = 0;
}
|
|
115
|
|
/* Counterpart of init_recog_no_volatile: accept volatile memory
   operands, so that every valid insn can be recognized.  */
void
init_recog (void)
{
  volatile_ok = 1;
}
|
|
121
|
|
122
|
|
123 /* Check that X is an insn-body for an `asm' with operands
|
|
124 and that the operands mentioned in it are legitimate. */
|
|
125
|
|
126 int
|
|
127 check_asm_operands (rtx x)
|
|
128 {
|
|
129 int noperands;
|
|
130 rtx *operands;
|
|
131 const char **constraints;
|
|
132 int i;
|
|
133
|
|
134 /* Post-reload, be more strict with things. */
|
|
135 if (reload_completed)
|
|
136 {
|
|
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
|
|
138 extract_insn (make_insn_raw (x));
|
|
139 constrain_operands (1);
|
|
140 return which_alternative >= 0;
|
|
141 }
|
|
142
|
|
143 noperands = asm_noperands (x);
|
|
144 if (noperands < 0)
|
|
145 return 0;
|
|
146 if (noperands == 0)
|
|
147 return 1;
|
|
148
|
|
149 operands = XALLOCAVEC (rtx, noperands);
|
|
150 constraints = XALLOCAVEC (const char *, noperands);
|
|
151
|
|
152 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
|
|
153
|
|
154 for (i = 0; i < noperands; i++)
|
|
155 {
|
|
156 const char *c = constraints[i];
|
|
157 if (c[0] == '%')
|
|
158 c++;
|
|
159 if (! asm_operand_ok (operands[i], c, constraints))
|
|
160 return 0;
|
|
161 }
|
|
162
|
|
163 return 1;
|
|
164 }
|
|
165
|
|
/* Static data for the next two routines.  */

/* One pending (not yet confirmed) change to the RTL, recorded by
   validate_change_1 so that the whole group can later be verified,
   confirmed or cancelled.  */
typedef struct change_t
{
  rtx object;		/* Insn or MEM the change occurs in, or 0.  */
  int old_code;		/* Saved INSN_CODE of OBJECT (insns only).  */
  rtx *loc;		/* Location within OBJECT that was changed.  */
  rtx old;		/* Previous contents of *LOC, for cancelling.  */
  bool unshare;		/* If true, copy_rtx *LOC on confirmation.  */
} change_t;

/* Growable array of pending changes and its allocated capacity.  */
static change_t *changes;
static int changes_allocated;

/* Number of entries of CHANGES currently in use.  */
static int num_changes = 0;
|
|
181
|
|
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.

   If UNSHARE is true, *LOC is copied with copy_rtx when the group is
   confirmed (see confirm_change_group).  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  /* A no-op change needs neither recording nor validation.  */
  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  /* A lone (non-group) change may only start with an empty change stack;
     pending changes accumulate only while IN_GROUP is nonzero.  */
  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
|
|
248
|
|
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  Returns nonzero if the change is (or, for a grouped
   change, will be) accepted; see validate_change_1 for details.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
|
|
257
|
|
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true, i.e. *LOC will be unshared with copy_rtx when the
   change group is confirmed.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
|
|
266
|
|
267
|
|
268 /* Keep X canonicalized if some changes have made it non-canonical; only
|
|
269 modifies the operands of X, not (for example) its code. Simplifications
|
|
270 are not the job of this routine.
|
|
271
|
|
272 Return true if anything was changed. */
|
|
273 bool
|
|
274 canonicalize_change_group (rtx insn, rtx x)
|
|
275 {
|
|
276 if (COMMUTATIVE_P (x)
|
|
277 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
|
|
278 {
|
|
279 /* Oops, the caller has made X no longer canonical.
|
|
280 Let's redo the changes in the correct order. */
|
|
281 rtx tem = XEXP (x, 0);
|
|
282 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
|
|
283 validate_change (insn, &XEXP (x, 1), tem, 1);
|
|
284 return true;
|
|
285 }
|
|
286 else
|
|
287 return false;
|
|
288 }
|
|
289
|
|
290
|
|
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   Returns 1 if INSN is invalid, 0 if valid.  On success, INSN_CODE is set
   and any CLOBBERs needed for recognition have been added to the pattern.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operand aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      /* Wrap the original pattern plus the needed CLOBBERs in a PARALLEL.  */
      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
|
|
342
|
|
/* Return number of changes made and not validated yet, i.e. the current
   depth of the pending-change stack built by validate_change_1.  */
int
num_changes_pending (void)
{
  return num_changes;
}
|
|
349
|
|
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  /* For a MEM, validity just means the address is still
	     legitimate.  */
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		/* Dropping the only CLOBBER leaves a bare pattern.  */
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  /* Rebuild the PARALLEL without its last element.  */
		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  /* All changes verified iff the loop ran to completion.  */
  return (i == num_changes);
}
|
|
444
|
|
445 /* A group of changes has previously been issued with validate_change
|
|
446 and verified with verify_changes. Call df_insn_rescan for each of
|
|
447 the insn changed and clear num_changes. */
|
|
448
|
|
449 void
|
|
450 confirm_change_group (void)
|
|
451 {
|
|
452 int i;
|
|
453 rtx last_object = NULL;
|
|
454
|
|
455 for (i = 0; i < num_changes; i++)
|
|
456 {
|
|
457 rtx object = changes[i].object;
|
|
458
|
|
459 if (changes[i].unshare)
|
|
460 *changes[i].loc = copy_rtx (*changes[i].loc);
|
|
461
|
|
462 /* Avoid unnecessary rescanning when multiple changes to same instruction
|
|
463 are made. */
|
|
464 if (object)
|
|
465 {
|
|
466 if (object != last_object && last_object && INSN_P (last_object))
|
|
467 df_insn_rescan (last_object);
|
|
468 last_object = object;
|
|
469 }
|
|
470 }
|
|
471
|
|
472 if (last_object && INSN_P (last_object))
|
|
473 df_insn_rescan (last_object);
|
|
474 num_changes = 0;
|
|
475 }
|
|
476
|
|
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (!verify_changes (0))
    {
      /* At least one change failed; retract the whole group.  */
      cancel_changes (0);
      return 0;
    }

  confirm_change_group ();
  return 1;
}
|
|
495
|
|
496
|
|
/* Return the number of changes so far in the current group.  Useful as
   the NUM argument to a later verify_changes or cancel_changes call.  */

int
num_validated_changes (void)
{
  return num_changes;
}
|
|
504
|
|
505 /* Retract the changes numbered NUM and up. */
|
|
506
|
|
507 void
|
|
508 cancel_changes (int num)
|
|
509 {
|
|
510 int i;
|
|
511
|
|
512 /* Back out all the changes. Do this in the opposite order in which
|
|
513 they were made. */
|
|
514 for (i = num_changes - 1; i >= num; i--)
|
|
515 {
|
|
516 *changes[i].loc = changes[i].old;
|
|
517 if (changes[i].object && !MEM_P (changes[i].object))
|
|
518 INSN_CODE (changes[i].object) = changes[i].old_code;
|
|
519 }
|
|
520 num_changes = num;
|
|
521 }
|
|
522
|
|
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  LOC points at the expression whose operands were just replaced by
   TO; OP0_MODE is the mode the first operand had before replacement.  All
   adjustments are queued as grouped changes against OBJECT.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
			  enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  /* Restore canonical operand order if the replacement broke it.  For
     comparisons this means swapping the condition code as well.  */
  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      /* Canonicalize (minus x const) as (plus x (neg const)).  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* The operand lost its mode (e.g. became a constant); fold the
	 extension using the operand's former mode.  */
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  /* Ask the target what memory mode its extv/extzv patterns want.  */
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      /* The bit position is now relative to the narrowed MEM.  */
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
|
|
653
|
|
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  If SIMPLIFY is true, also keep the
   containing expression consistent via simplify_while_replacing.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  /* Remember the first operand's mode before any replacement, in case
     the replacement destroys it (e.g. replaces a REG by a constant).  */
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      /* Unshare TO so repeated substitutions do not create shared rtl.  */
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      /* Recurse only into the destination; the shared ASM_OPERANDS
		 source is processed once via element 0.  */
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
|
|
740
|
|
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.
   Return nonzero if the replacement was accepted and applied.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
|
|
750
|
|
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
|
|
763
|
|
/* Same as above, but do not simplify rtx afterwards (the replacements are
   left exactly as substituted, with no canonicalization pass).  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
|
|
773
|
|
/* Try replacing every occurrence of FROM in INSN with TO.  Unlike
   validate_replace_rtx, the changes are left pending in the current group;
   the caller must apply or cancel them.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}
|
|
781
|
|
/* Function called by note_uses to replace used subexpressions.  */

/* Closure passed through note_uses to validate_replace_src_1.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};
|
|
789
|
|
790 static void
|
|
791 validate_replace_src_1 (rtx *x, void *data)
|
|
792 {
|
|
793 struct validate_replace_src_data *d
|
|
794 = (struct validate_replace_src_data *) data;
|
|
795
|
|
796 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
|
|
797 }
|
|
798
|
|
799 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
|
|
800 SET_DESTs. */
|
|
801
|
|
802 void
|
|
803 validate_replace_src_group (rtx from, rtx to, rtx insn)
|
|
804 {
|
|
805 struct validate_replace_src_data d;
|
|
806
|
|
807 d.from = from;
|
|
808 d.to = to;
|
|
809 d.insn = insn;
|
|
810 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
|
|
811 }
|
|
812
|
|
813 /* Try simplify INSN.
|
|
814 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
|
|
815 pattern and return true if something was simplified. */
|
|
816
|
|
817 bool
|
|
818 validate_simplify_insn (rtx insn)
|
|
819 {
|
|
820 int i;
|
|
821 rtx pat = NULL;
|
|
822 rtx newpat = NULL;
|
|
823
|
|
824 pat = PATTERN (insn);
|
|
825
|
|
826 if (GET_CODE (pat) == SET)
|
|
827 {
|
|
828 newpat = simplify_rtx (SET_SRC (pat));
|
|
829 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
|
|
830 validate_change (insn, &SET_SRC (pat), newpat, 1);
|
|
831 newpat = simplify_rtx (SET_DEST (pat));
|
|
832 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
|
|
833 validate_change (insn, &SET_DEST (pat), newpat, 1);
|
|
834 }
|
|
835 else if (GET_CODE (pat) == PARALLEL)
|
|
836 for (i = 0; i < XVECLEN (pat, 0); i++)
|
|
837 {
|
|
838 rtx s = XVECEXP (pat, 0, i);
|
|
839
|
|
840 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
|
|
841 {
|
|
842 newpat = simplify_rtx (SET_SRC (s));
|
|
843 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
|
|
844 validate_change (insn, &SET_SRC (s), newpat, 1);
|
|
845 newpat = simplify_rtx (SET_DEST (s));
|
|
846 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
|
|
847 validate_change (insn, &SET_DEST (s), newpat, 1);
|
|
848 }
|
|
849 }
|
|
850 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
|
|
851 }
|
|
852
|
|
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
|
|
871
|
|
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Reject integer constants that don't fit in MODE in canonical
     (sign-extended) form.  */
  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      /* Strip the SUBREG and check the inner expression.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}
|
|
973
|
|
974 /* Return 1 if OP is a valid memory address for a memory reference
|
|
975 of mode MODE.
|
|
976
|
|
977 The main use of this function is as a predicate in match_operand
|
|
978 expressions in the machine description. */
|
|
979
|
|
980 int
|
|
981 address_operand (rtx op, enum machine_mode mode)
|
|
982 {
|
|
983 return memory_address_p (mode, op);
|
|
984 }
|
|
985
|
|
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  /* Wrong mode is an immediate rejection (unless any mode was asked for).  */
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      /* Reject a hard register whose target forbids viewing it in MODE.
	 Complex modes are exempt: their subregs pick out a component.  */
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      /* From here on, judge the inner expression of the SUBREG.  */
      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
|
|
1043
|
|
1044 /* Return 1 for a register in Pmode; ignore the tested mode. */
|
|
1045
|
|
1046 int
|
|
1047 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
|
1048 {
|
|
1049 return register_operand (op, Pmode);
|
|
1050 }
|
|
1051
|
|
1052 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
|
|
1053 or a hard register. */
|
|
1054
|
|
1055 int
|
|
1056 scratch_operand (rtx op, enum machine_mode mode)
|
|
1057 {
|
|
1058 if (GET_MODE (op) != mode && mode != VOIDmode)
|
|
1059 return 0;
|
|
1060
|
|
1061 return (GET_CODE (op) == SCRATCH
|
|
1062 || (REG_P (op)
|
|
1063 && REGNO (op) < FIRST_PSEUDO_REGISTER));
|
|
1064 }
|
|
1065
|
|
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE; otherwise it is not a valid immediate for that mode.  */
  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept any constant whose mode agrees with MODE (or where either
     is VOIDmode), provided it is legitimate for this target and, when
     compiling PIC, legitimate as a PIC operand.  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}
|
|
1092
|
|
1093 /* Returns 1 if OP is an operand that is a CONST_INT. */
|
|
1094
|
|
1095 int
|
|
1096 const_int_operand (rtx op, enum machine_mode mode)
|
|
1097 {
|
|
1098 if (GET_CODE (op) != CONST_INT)
|
|
1099 return 0;
|
|
1100
|
|
1101 if (mode != VOIDmode
|
|
1102 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
|
|
1103 return 0;
|
|
1104
|
|
1105 return 1;
|
|
1106 }
|
|
1107
|
|
1108 /* Returns 1 if OP is an operand that is a constant integer or constant
|
|
1109 floating-point number. */
|
|
1110
|
|
1111 int
|
|
1112 const_double_operand (rtx op, enum machine_mode mode)
|
|
1113 {
|
|
1114 /* Don't accept CONST_INT or anything similar
|
|
1115 if the caller wants something floating. */
|
|
1116 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
|
|
1117 && GET_MODE_CLASS (mode) != MODE_INT
|
|
1118 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
|
|
1119 return 0;
|
|
1120
|
|
1121 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
|
|
1122 && (mode == VOIDmode || GET_MODE (op) == mode
|
|
1123 || GET_MODE (op) == VOIDmode));
|
|
1124 }
|
|
1125
|
|
1126 /* Return 1 if OP is a general operand that is not an immediate operand. */
|
|
1127
|
|
1128 int
|
|
1129 nonimmediate_operand (rtx op, enum machine_mode mode)
|
|
1130 {
|
|
1131 return (general_operand (op, mode) && ! CONSTANT_P (op));
|
|
1132 }
|
|
1133
|
|
/* Return 1 if OP is a register reference or immediate value of mode MODE.

   This is essentially general_operand with the MEM cases removed: the
   constant checks and the register/SUBREG checks below intentionally
   mirror that function.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* A CONST_INT must already be sign-extended canonically for MODE.  */
      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  /* Non-constants must have the requested mode exactly.  */
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
|
|
1181
|
|
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  /* The target may pad pushes to a larger slot than MODE itself needs.  */
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  /* From here on, examine the address of the MEM.  */
  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No padding: a simple pre-dec/pre-inc (STACK_PUSH_CODE) push.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* Padded push: must be (pre_modify sp (plus sp (const_int +-size)))
	 with the sign of the adjustment matching the stack direction.  */
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  /* In both forms, operand 0 of the autoinc must be the stack pointer.  */
  return XEXP (op, 0) == stack_pointer_rtx;
}
|
|
1227
|
|
1228 /* Return 1 if OP is a valid operand that stands for popping a
|
|
1229 value of mode MODE off the stack.
|
|
1230
|
|
1231 The main use of this function is as a predicate in match_operand
|
|
1232 expressions in the machine description. */
|
|
1233
|
|
1234 int
|
|
1235 pop_operand (rtx op, enum machine_mode mode)
|
|
1236 {
|
|
1237 if (!MEM_P (op))
|
|
1238 return 0;
|
|
1239
|
|
1240 if (mode != VOIDmode && GET_MODE (op) != mode)
|
|
1241 return 0;
|
|
1242
|
|
1243 op = XEXP (op, 0);
|
|
1244
|
|
1245 if (GET_CODE (op) != STACK_POP_CODE)
|
|
1246 return 0;
|
|
1247
|
|
1248 return XEXP (op, 0) == stack_pointer_rtx;
|
|
1249 }
|
|
1250
|
|
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  /* The target macro jumps to `win' when the address is legitimate;
     falling through means it is not.  Do not restructure this —
     the label is the macro's success exit.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
|
|
1262
|
|
1263 /* Return 1 if OP is a valid memory reference with mode MODE,
|
|
1264 including a valid address.
|
|
1265
|
|
1266 The main use of this function is as a predicate in match_operand
|
|
1267 expressions in the machine description. */
|
|
1268
|
|
1269 int
|
|
1270 memory_operand (rtx op, enum machine_mode mode)
|
|
1271 {
|
|
1272 rtx inner;
|
|
1273
|
|
1274 if (! reload_completed)
|
|
1275 /* Note that no SUBREG is a memory operand before end of reload pass,
|
|
1276 because (SUBREG (MEM...)) forces reloading into a register. */
|
|
1277 return MEM_P (op) && general_operand (op, mode);
|
|
1278
|
|
1279 if (mode != VOIDmode && GET_MODE (op) != mode)
|
|
1280 return 0;
|
|
1281
|
|
1282 inner = op;
|
|
1283 if (GET_CODE (inner) == SUBREG)
|
|
1284 inner = SUBREG_REG (inner);
|
|
1285
|
|
1286 return (MEM_P (inner) && general_operand (op, mode));
|
|
1287 }
|
|
1288
|
|
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a MEM that is a valid memory operand and whose
     address is itself a general operand.  */
  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
|
|
1321
|
|
1322 /* Return 1 if this is a comparison operator. This allows the use of
|
|
1323 MATCH_OPERATOR to recognize all the branch insns. */
|
|
1324
|
|
1325 int
|
|
1326 comparison_operator (rtx op, enum machine_mode mode)
|
|
1327 {
|
|
1328 return ((mode == VOIDmode || GET_MODE (op) == mode)
|
|
1329 && COMPARISON_P (op));
|
|
1330 }
|
|
1331
|
|
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.
	     Anything other than a trailing run of CLOBBERs after the SETs
	     makes the body unrecognizable as asm.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  The shared input vector
		 is used as the identity of the original insn.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
|
|
1409
|
|
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.

   The four cases below match the shapes accepted by asm_noperands:
   a lone SET, a lone ASM_OPERANDS, and the two PARALLEL forms.
   Output operands always come before input operands in the vectors.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the NOUT outputs in each vector.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

    }

  /* NOTE(review): if BODY matched none of the four shapes above, ASMOP is
     still null here and the dereferences below would crash — callers are
     expected to have checked asm_noperands first.  */
  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
|
|
1552
|
|
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.

   CONSTRAINT is one alternative-separated constraint string; CONSTRAINTS,
   if non-null, is the full array of operand constraints used to resolve
   matching-digit constraints.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* RESULT accumulates across alternatives: any alternative that accepts
     OP makes the whole constraint acceptable.  */
  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	/* Modifier characters carry no matching information here.  */
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the maching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case TARGET_MEM_CONSTRAINT:
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  /* Note the deliberate `1 ||': the PRE_DEC/POST_DEC tests are
	     intentionally dead, per the comment above.  */
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  /* Same deliberate any-memory matching as '<' above.  */
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  /* 's' wants a constant that is NOT an explicit integer;
	     skip to the next constraint character for those.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	/* Target-defined integer-range constraints.  */
	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  Note that `case 'r':'
	     is placed inside this if so that 'r' and register-class
	     letters share the register_operand check.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
	    /* Every memory operand can be reloaded to fit.  */
	    result = result || memory_operand (op, VOIDmode);
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
	    /* Every address operand can be reloaded to fit.  */
	    result = result || address_operand (op, VOIDmode);
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
#endif
	  break;
	}
      /* Skip the remaining characters of a multi-character constraint.
	 A NUL inside one means the string is malformed: reject.  */
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}
|
|
1770
|
|
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If both summands are constant, the whole sum is the constant
     term, so return its own location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
|
|
1816
|
|
1817 /* Return 1 if OP is a memory reference
|
|
1818 whose address contains no side effects
|
|
1819 and remains valid after the addition
|
|
1820 of a positive integer less than the
|
|
1821 size of the object being referenced.
|
|
1822
|
|
1823 We assume that the original address is valid and do not check it.
|
|
1824
|
|
1825 This uses strict_memory_address_p as a subroutine, so
|
|
1826 don't use it before reload. */
|
|
1827
|
|
1828 int
|
|
1829 offsettable_memref_p (rtx op)
|
|
1830 {
|
|
1831 return ((MEM_P (op))
|
|
1832 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
|
|
1833 }
|
|
1834
|
|
1835 /* Similar, but don't require a strictly valid mem ref:
|
|
1836 consider pseudo-regs valid as index or base regs. */
|
|
1837
|
|
1838 int
|
|
1839 offsettable_nonstrict_memref_p (rtx op)
|
|
1840 {
|
|
1841 return ((MEM_P (op))
|
|
1842 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
|
|
1843 }
|
|
1844
|
|
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Choose the strict or non-strict validity check once, up front.  */
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily patch the constant term in place (cheaper than
	 copying the whole address), test, then restore it.  The
	 save/restore through Y1 and *Y2 must not be reordered.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Auto-increment addresses cannot be offset.  */
  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
|
|
1920
|
|
1921 /* Return 1 if ADDR is an address-expression whose effect depends
|
|
1922 on the mode of the memory reference it is used in.
|
|
1923
|
|
1924 Autoincrement addressing is a typical example of mode-dependence
|
|
1925 because the amount of the increment depends on the mode. */
|
|
1926
|
|
int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  /* The target macro jumps to the `win' label below when ADDR is
     mode-dependent; otherwise control falls through and we return 0.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
|
|
1945
|
|
1946 /* Like extract_insn, but save insn extracted and don't extract again, when
|
|
1947 called again for the same insn expecting that recog_data still contain the
|
|
1948 valid information. This is used primary by gen_attr infrastructure that
|
|
1949 often does extract insn again and again. */
|
|
1950 void
|
|
1951 extract_insn_cached (rtx insn)
|
|
1952 {
|
|
1953 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
|
|
1954 return;
|
|
1955 extract_insn (insn);
|
|
1956 recog_data.insn = insn;
|
|
1957 }
|
|
1958
|
|
1959 /* Do cached extract_insn, constrain_operands and complain about failures.
|
|
1960 Used by insn_attrtab. */
|
|
1961 void
|
|
1962 extract_constrain_insn_cached (rtx insn)
|
|
1963 {
|
|
1964 extract_insn_cached (insn);
|
|
1965 if (which_alternative == -1
|
|
1966 && !constrain_operands (reload_completed))
|
|
1967 fatal_insn_not_found (insn);
|
|
1968 }
|
|
1969
|
|
1970 /* Do cached constrain_operands and complain about failures. */
|
|
1971 int
|
|
1972 constrain_operands_cached (int strict)
|
|
1973 {
|
|
1974 if (which_alternative == -1)
|
|
1975 return constrain_operands (strict);
|
|
1976 else
|
|
1977 return 1;
|
|
1978 }
|
|
1979
|
|
1980 /* Analyze INSN and fill in recog_data. */
|
|
1981
|
|
void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  if (noperands > 0)
	    {
	      /* The alternative count is one more than the number of
		 commas in the first operand's constraint string.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand from the first character of its constraint:
     `=' marks an output, `+' an in/out operand, anything else an input.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  /* For an unrecognized insn every alternative counts as enabled;
     otherwise query the `enabled' attribute per alternative.  */
  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
	{
	  /* get_attr_enabled reads the global which_alternative.  */
	  which_alternative = i;
	  recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
	}
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
|
|
2089
|
|
2090 /* After calling extract_insn, you can use this function to extract some
|
|
2091 information from the constraint strings into a more usable form.
|
|
2092 The collected data is stored in recog_op_alt. */
|
|
void
preprocess_constraints (void)
{
  int i;

  /* Clear the per-operand, per-alternative summary data.  */
  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
				 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].cl = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  /* Disabled alternatives contribute nothing; skip to the next
	     comma-separated section of the constraint string.  */
	  if (!recog_data.alternative_enabled_p[j])
	    {
	      p = skip_alternative (p);
	      continue;
	    }

	  /* An empty alternative accepts anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  /* Walk the constraint characters of this alternative.  */
	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* Matching-operand constraint: record the pairing in
		       both directions.  */
		    char *end;
		    op_alt[j].matches = strtoul (p, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  /* `continue' so we do not advance P by CONSTRAINT_LEN;
		     strtoul already moved it past the whole number.  */
		  continue;

		case TARGET_MEM_CONSTRAINT:
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  op_alt[j].cl =
		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
		  break;

		default:
		  /* Target-defined constraint letters.  */
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    {
		      op_alt[j].is_address = 1;
		      op_alt[j].cl
			= (reg_class_subunion
			   [(int) op_alt[j].cl]
			   [(int) base_reg_class (VOIDmode, ADDRESS,
						  SCRATCH)]);
		      break;
		    }

		  op_alt[j].cl
		    = (reg_class_subunion
		       [(int) op_alt[j].cl]
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
|
|
2231
|
|
2232 /* Check the operands of an insn against the insn's operand constraints
|
|
2233 and return 1 if they are valid.
|
|
2234 The information about the insn's operands, constraints, operand modes
|
|
2235 etc. is obtained from the global variables set up by extract_insn.
|
|
2236
|
|
2237 WHICH_ALTERNATIVE is set to a number which indicates which
|
|
2238 alternative of constraints was matched: 0 for the first alternative,
|
|
2239 1 for the next, etc.
|
|
2240
|
|
2241 In addition, when two operands are required to match
|
|
2242 and it happens that the output operand is (reg) while the
|
|
2243 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
|
|
2244 make the output operand look like the input.
|
|
2245 This is because the output operand is the one the template will print.
|
|
2246
|
|
2247 This is used in final, just before printing the assembler code and by
|
|
2248 the routines that determine an insn's attribute.
|
|
2249
|
|
2250 If STRICT is a positive nonzero value, it means that we have been
|
|
2251 called after reload has been completed. In that case, we must
|
|
2252 do all checks strictly. If it is zero, it means that we have been called
|
|
2253 before reload has completed. In that case, we first try to see if we can
|
|
2254 find an alternative that matches strictly. If not, we try again, this
|
|
2255 time assuming that reload will fix up the insn. This provides a "best
|
|
2256 guess" for the alternative and is used to compute attributes of insns prior
|
|
2257 to reload. A negative value of STRICT is used for this internal call. */
|
|
2258
|
|
/* A pairing of operands recorded by constrain_operands when two operands
   match but differ in side effects (see the val == 2 case there): operand
   THIS_OP must later be copied over operand OTHER.  */
struct funny_match
{
  int this_op, other;
};
|
|
2263
|
|
int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  /* An insn with no operands or no alternatives trivially matches.  */
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  /* Try each alternative in turn until one matches all operands.  */
  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      /* Skip disabled alternatives entirely.  */
      if (!recog_data.alternative_enabled_p[which_alternative])
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  /* Scan this operand's constraint letters for the current
	     alternative; the loop ends at a comma or end of string.  */
	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case TARGET_MEM_CONSTRAINT:
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		/* Numeric constants do NOT satisfy `s'; break out before
		   the CONSTANT_P test below.  Anything else falls through
		   to the `i' case.  */
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
		/* FALLTHRU */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  /* `r' and machine-specific register-class letters.  */
		  enum reg_class cl;

		  cl = (c == 'r'
			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
		        win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0  && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      /* Apply the deferred operand copies recorded above.  */
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
|
|
2663
|
|
2664 /* Return 1 iff OPERAND (assumed to be a REG rtx)
|
|
2665 is a hard reg in class CLASS when its regno is offset by OFFSET
|
|
2666 and changed to mode MODE.
|
|
2667 If REG occupies multiple hard regs, all of them must be in CLASS. */
|
|
2668
|
|
2669 int
|
|
2670 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
|
|
2671 enum machine_mode mode)
|
|
2672 {
|
|
2673 int regno = REGNO (operand);
|
|
2674
|
|
2675 if (cl == NO_REGS)
|
|
2676 return 0;
|
|
2677
|
|
2678 return (regno < FIRST_PSEUDO_REGISTER
|
|
2679 && in_hard_reg_set_p (reg_class_contents[(int) cl],
|
|
2680 mode, regno + offset));
|
|
2681 }
|
|
2682
|
|
2683 /* Split single instruction. Helper function for split_all_insns and
|
|
2684 split_all_insns_noflow. Return last insn in the sequence if successful,
|
|
2685 or NULL if unsuccessful. */
|
|
2686
|
|
static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  /* try_split returns INSN itself when no splitting was done.  */
  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      /* Walk the new sequence (from the insn after FIRST through LAST)
	 and clean up any stray subregs.  */
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
|
|
2736
|
|
/* Split all insns in the function.  Updates the CFG (via
   find_many_sub_basic_blocks) for blocks where splitting occurred.  */
|
|
2738
|
|
void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  /* Bitmap of blocks in which an insn was actually split; these need
     their sub-basic-block structure recomputed afterward.  */
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */

		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
|
|
2808
|
|
2809 /* Same as split_all_insns, but do not expect CFG to be available.
|
|
2810 Used by machine dependent reorg passes. */
|
|
2811
|
|
2812 unsigned int
|
|
2813 split_all_insns_noflow (void)
|
|
2814 {
|
|
2815 rtx next, insn;
|
|
2816
|
|
2817 for (insn = get_insns (); insn; insn = next)
|
|
2818 {
|
|
2819 next = NEXT_INSN (insn);
|
|
2820 if (INSN_P (insn))
|
|
2821 {
|
|
2822 /* Don't split no-op move insns. These should silently
|
|
2823 disappear later in final. Splitting such insns would
|
|
2824 break the code that handles LIBCALL blocks. */
|
|
2825 rtx set = single_set (insn);
|
|
2826 if (set && set_noop_p (set))
|
|
2827 {
|
|
2828 /* Nops get in the way while scheduling, so delete them
|
|
2829 now if register allocation has already been done. It
|
|
2830 is too risky to try to do this before register
|
|
2831 allocation, and there are unlikely to be very many
|
|
2832 nops then anyways.
|
|
2833
|
|
2834 ??? Should we use delete_insn when the CFG isn't valid? */
|
|
2835 if (reload_completed)
|
|
2836 delete_insn_and_edges (insn);
|
|
2837 }
|
|
2838 else
|
|
2839 split_insn (insn);
|
|
2840 }
|
|
2841 }
|
|
2842 return 0;
|
|
2843 }
|
|
2844
|
|
2845 #ifdef HAVE_peephole2
|
|
/* One slot of the peephole2 window: an insn and the set of registers
   live just before it.  */
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

/* Circular buffer of the insns currently visible to a peep2 pattern;
   one extra slot holds the end-of-block marker.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the first valid slot in the circular buffer.  */
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
|
|
2861
|
|
2862 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
|
|
2863 does not exist. Used by the recognizer to find the next insn to match
|
|
2864 in a multi-insn pattern. */
|
|
2865
|
|
2866 rtx
|
|
2867 peep2_next_insn (int n)
|
|
2868 {
|
|
2869 gcc_assert (n <= peep2_current_count);
|
|
2870
|
|
2871 n += peep2_current;
|
|
2872 if (n >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2873 n -= MAX_INSNS_PER_PEEP2 + 1;
|
|
2874
|
|
2875 return peep2_insn_data[n].insn;
|
|
2876 }
|
|
2877
|
|
2878 /* Return true if REGNO is dead before the Nth non-note insn
|
|
2879 after `current'. */
|
|
2880
|
|
2881 int
|
|
2882 peep2_regno_dead_p (int ofs, int regno)
|
|
2883 {
|
|
2884 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
|
|
2885
|
|
2886 ofs += peep2_current;
|
|
2887 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2888 ofs -= MAX_INSNS_PER_PEEP2 + 1;
|
|
2889
|
|
2890 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
|
|
2891
|
|
2892 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
|
|
2893 }
|
|
2894
|
|
2895 /* Similarly for a REG. */
|
|
2896
|
|
2897 int
|
|
2898 peep2_reg_dead_p (int ofs, rtx reg)
|
|
2899 {
|
|
2900 int regno, n;
|
|
2901
|
|
2902 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
|
|
2903
|
|
2904 ofs += peep2_current;
|
|
2905 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2906 ofs -= MAX_INSNS_PER_PEEP2 + 1;
|
|
2907
|
|
2908 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
|
|
2909
|
|
2910 regno = REGNO (reg);
|
|
2911 n = hard_regno_nregs[regno][GET_MODE (reg)];
|
|
2912 while (--n >= 0)
|
|
2913 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
|
|
2914 return 0;
|
|
2915 return 1;
|
|
2916 }
|
|
2917
|
|
2918 /* Try to find a hard register of mode MODE, matching the register class in
|
|
2919 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
|
|
2920 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
|
|
2921 in which case the only condition is that the register must be available
|
|
2922 before CURRENT_INSN.
|
|
2923 Registers that already have bits set in REG_SET will not be considered.
|
|
2924
|
|
2925 If an appropriate register is available, it will be returned and the
|
|
2926 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
|
|
2927 returned. */
|
|
2928
|
|
2929 rtx
|
|
2930 peep2_find_free_register (int from, int to, const char *class_str,
|
|
2931 enum machine_mode mode, HARD_REG_SET *reg_set)
|
|
2932 {
|
|
2933 static int search_ofs;
|
|
2934 enum reg_class cl;
|
|
2935 HARD_REG_SET live;
|
|
2936 int i;
|
|
2937
|
|
2938 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
|
|
2939 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
|
|
2940
|
|
2941 from += peep2_current;
|
|
2942 if (from >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2943 from -= MAX_INSNS_PER_PEEP2 + 1;
|
|
2944 to += peep2_current;
|
|
2945 if (to >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2946 to -= MAX_INSNS_PER_PEEP2 + 1;
|
|
2947
|
|
2948 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
|
|
2949 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
|
|
2950
|
|
2951 while (from != to)
|
|
2952 {
|
|
2953 HARD_REG_SET this_live;
|
|
2954
|
|
2955 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
|
|
2956 from = 0;
|
|
2957 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
|
|
2958 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
|
|
2959 IOR_HARD_REG_SET (live, this_live);
|
|
2960 }
|
|
2961
|
|
2962 cl = (class_str[0] == 'r' ? GENERAL_REGS
|
|
2963 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
|
|
2964
|
|
2965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
|
2966 {
|
|
2967 int raw_regno, regno, success, j;
|
|
2968
|
|
2969 /* Distribute the free registers as much as possible. */
|
|
2970 raw_regno = search_ofs + i;
|
|
2971 if (raw_regno >= FIRST_PSEUDO_REGISTER)
|
|
2972 raw_regno -= FIRST_PSEUDO_REGISTER;
|
|
2973 #ifdef REG_ALLOC_ORDER
|
|
2974 regno = reg_alloc_order[raw_regno];
|
|
2975 #else
|
|
2976 regno = raw_regno;
|
|
2977 #endif
|
|
2978
|
|
2979 /* Don't allocate fixed registers. */
|
|
2980 if (fixed_regs[regno])
|
|
2981 continue;
|
|
2982 /* Don't allocate global registers. */
|
|
2983 if (global_regs[regno])
|
|
2984 continue;
|
|
2985 /* Make sure the register is of the right class. */
|
|
2986 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
|
|
2987 continue;
|
|
2988 /* And can support the mode we need. */
|
|
2989 if (! HARD_REGNO_MODE_OK (regno, mode))
|
|
2990 continue;
|
|
2991 /* And that we don't create an extra save/restore. */
|
|
2992 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
|
|
2993 continue;
|
|
2994 if (! targetm.hard_regno_scratch_ok (regno))
|
|
2995 continue;
|
|
2996
|
|
2997 /* And we don't clobber traceback for noreturn functions. */
|
|
2998 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
|
|
2999 && (! reload_completed || frame_pointer_needed))
|
|
3000 continue;
|
|
3001
|
|
3002 success = 1;
|
|
3003 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
|
|
3004 {
|
|
3005 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
|
|
3006 || TEST_HARD_REG_BIT (live, regno + j))
|
|
3007 {
|
|
3008 success = 0;
|
|
3009 break;
|
|
3010 }
|
|
3011 }
|
|
3012 if (success)
|
|
3013 {
|
|
3014 add_to_hard_reg_set (reg_set, mode, regno);
|
|
3015
|
|
3016 /* Start the next search with the next register. */
|
|
3017 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
|
|
3018 raw_regno = 0;
|
|
3019 search_ofs = raw_regno;
|
|
3020
|
|
3021 return gen_rtx_REG (mode, regno);
|
|
3022 }
|
|
3023 }
|
|
3024
|
|
3025 search_ofs = 0;
|
|
3026 return NULL_RTX;
|
|
3027 }
|
|
3028
|
|
3029 /* Perform the peephole2 optimization pass. */
|
|
3030
|
|
3031 static void
|
|
3032 peephole2_optimize (void)
|
|
3033 {
|
|
3034 rtx insn, prev;
|
|
3035 bitmap live;
|
|
3036 int i;
|
|
3037 basic_block bb;
|
|
3038 bool do_cleanup_cfg = false;
|
|
3039 bool do_rebuild_jump_labels = false;
|
|
3040
|
|
3041 df_set_flags (DF_LR_RUN_DCE);
|
|
3042 df_analyze ();
|
|
3043
|
|
3044 /* Initialize the regsets we're going to use. */
|
|
3045 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
|
|
3046 peep2_insn_data[i].live_before = BITMAP_ALLOC (®_obstack);
|
|
3047 live = BITMAP_ALLOC (®_obstack);
|
|
3048
|
|
3049 FOR_EACH_BB_REVERSE (bb)
|
|
3050 {
|
|
3051 rtl_profile_for_bb (bb);
|
|
3052 /* Indicate that all slots except the last holds invalid data. */
|
|
3053 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
|
|
3054 peep2_insn_data[i].insn = NULL_RTX;
|
|
3055 peep2_current_count = 0;
|
|
3056
|
|
3057 /* Indicate that the last slot contains live_after data. */
|
|
3058 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
|
|
3059 peep2_current = MAX_INSNS_PER_PEEP2;
|
|
3060
|
|
3061 /* Start up propagation. */
|
|
3062 bitmap_copy (live, DF_LR_OUT (bb));
|
|
3063 df_simulate_initialize_backwards (bb, live);
|
|
3064 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
|
|
3065
|
|
3066 for (insn = BB_END (bb); ; insn = prev)
|
|
3067 {
|
|
3068 prev = PREV_INSN (insn);
|
|
3069 if (INSN_P (insn))
|
|
3070 {
|
|
3071 rtx attempt, before_try, x;
|
|
3072 int match_len;
|
|
3073 rtx note;
|
|
3074 bool was_call = false;
|
|
3075
|
|
3076 /* Record this insn. */
|
|
3077 if (--peep2_current < 0)
|
|
3078 peep2_current = MAX_INSNS_PER_PEEP2;
|
|
3079 if (peep2_current_count < MAX_INSNS_PER_PEEP2
|
|
3080 && peep2_insn_data[peep2_current].insn == NULL_RTX)
|
|
3081 peep2_current_count++;
|
|
3082 peep2_insn_data[peep2_current].insn = insn;
|
|
3083 df_simulate_one_insn_backwards (bb, insn, live);
|
|
3084 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
|
|
3085
|
|
3086 if (RTX_FRAME_RELATED_P (insn))
|
|
3087 {
|
|
3088 /* If an insn has RTX_FRAME_RELATED_P set, peephole
|
|
3089 substitution would lose the
|
|
3090 REG_FRAME_RELATED_EXPR that is attached. */
|
|
3091 peep2_current_count = 0;
|
|
3092 attempt = NULL;
|
|
3093 }
|
|
3094 else
|
|
3095 /* Match the peephole. */
|
|
3096 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
|
|
3097
|
|
3098 if (attempt != NULL)
|
|
3099 {
|
|
3100 /* If we are splitting a CALL_INSN, look for the CALL_INSN
|
|
3101 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
|
|
3102 cfg-related call notes. */
|
|
3103 for (i = 0; i <= match_len; ++i)
|
|
3104 {
|
|
3105 int j;
|
|
3106 rtx old_insn, new_insn, note;
|
|
3107
|
|
3108 j = i + peep2_current;
|
|
3109 if (j >= MAX_INSNS_PER_PEEP2 + 1)
|
|
3110 j -= MAX_INSNS_PER_PEEP2 + 1;
|
|
3111 old_insn = peep2_insn_data[j].insn;
|
|
3112 if (!CALL_P (old_insn))
|
|
3113 continue;
|
|
3114 was_call = true;
|
|
3115
|
|
3116 new_insn = attempt;
|
|
3117 while (new_insn != NULL_RTX)
|
|
3118 {
|
|
3119 if (CALL_P (new_insn))
|
|
3120 break;
|
|
3121 new_insn = NEXT_INSN (new_insn);
|
|
3122 }
|
|
3123
|
|
3124 gcc_assert (new_insn != NULL_RTX);
|
|
3125
|
|
3126 CALL_INSN_FUNCTION_USAGE (new_insn)
|
|
3127 = CALL_INSN_FUNCTION_USAGE (old_insn);
|
|
3128
|
|
3129 for (note = REG_NOTES (old_insn);
|
|
3130 note;
|
|
3131 note = XEXP (note, 1))
|
|
3132 switch (REG_NOTE_KIND (note))
|
|
3133 {
|
|
3134 case REG_NORETURN:
|
|
3135 case REG_SETJMP:
|
|
3136 add_reg_note (new_insn, REG_NOTE_KIND (note),
|
|
3137 XEXP (note, 0));
|
|
3138 break;
|
|
3139 default:
|
|
3140 /* Discard all other reg notes. */
|
|
3141 break;
|
|
3142 }
|
|
3143
|
|
3144 /* Croak if there is another call in the sequence. */
|
|
3145 while (++i <= match_len)
|
|
3146 {
|
|
3147 j = i + peep2_current;
|
|
3148 if (j >= MAX_INSNS_PER_PEEP2 + 1)
|
|
3149 j -= MAX_INSNS_PER_PEEP2 + 1;
|
|
3150 old_insn = peep2_insn_data[j].insn;
|
|
3151 gcc_assert (!CALL_P (old_insn));
|
|
3152 }
|
|
3153 break;
|
|
3154 }
|
|
3155
|
|
3156 i = match_len + peep2_current;
|
|
3157 if (i >= MAX_INSNS_PER_PEEP2 + 1)
|
|
3158 i -= MAX_INSNS_PER_PEEP2 + 1;
|
|
3159
|
|
3160 note = find_reg_note (peep2_insn_data[i].insn,
|
|
3161 REG_EH_REGION, NULL_RTX);
|
|
3162
|
|
3163 /* Replace the old sequence with the new. */
|
|
3164 attempt = emit_insn_after_setloc (attempt,
|
|
3165 peep2_insn_data[i].insn,
|
|
3166 INSN_LOCATOR (peep2_insn_data[i].insn));
|
|
3167 before_try = PREV_INSN (insn);
|
|
3168 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
|
|
3169
|
|
3170 /* Re-insert the EH_REGION notes. */
|
|
3171 if (note || (was_call && nonlocal_goto_handler_labels))
|
|
3172 {
|
|
3173 edge eh_edge;
|
|
3174 edge_iterator ei;
|
|
3175
|
|
3176 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
|
|
3177 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
|
|
3178 break;
|
|
3179
|
|
3180 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
|
|
3181 if (CALL_P (x)
|
|
3182 || (flag_non_call_exceptions
|
|
3183 && may_trap_p (PATTERN (x))
|
|
3184 && !find_reg_note (x, REG_EH_REGION, NULL)))
|
|
3185 {
|
|
3186 if (note)
|
|
3187 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
|
|
3188
|
|
3189 if (x != BB_END (bb) && eh_edge)
|
|
3190 {
|
|
3191 edge nfte, nehe;
|
|
3192 int flags;
|
|
3193
|
|
3194 nfte = split_block (bb, x);
|
|
3195 flags = (eh_edge->flags
|
|
3196 & (EDGE_EH | EDGE_ABNORMAL));
|
|
3197 if (CALL_P (x))
|
|
3198 flags |= EDGE_ABNORMAL_CALL;
|
|
3199 nehe = make_edge (nfte->src, eh_edge->dest,
|
|
3200 flags);
|
|
3201
|
|
3202 nehe->probability = eh_edge->probability;
|
|
3203 nfte->probability
|
|
3204 = REG_BR_PROB_BASE - nehe->probability;
|
|
3205
|
|
3206 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
|
|
3207 bb = nfte->src;
|
|
3208 eh_edge = nehe;
|
|
3209 }
|
|
3210 }
|
|
3211
|
|
3212 /* Converting possibly trapping insn to non-trapping is
|
|
3213 possible. Zap dummy outgoing edges. */
|
|
3214 do_cleanup_cfg |= purge_dead_edges (bb);
|
|
3215 }
|
|
3216
|
|
3217 #ifdef HAVE_conditional_execution
|
|
3218 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
|
|
3219 peep2_insn_data[i].insn = NULL_RTX;
|
|
3220 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
|
|
3221 peep2_current_count = 0;
|
|
3222 #else
|
|
3223 /* Back up lifetime information past the end of the
|
|
3224 newly created sequence. */
|
|
3225 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
|
|
3226 i = 0;
|
|
3227 bitmap_copy (live, peep2_insn_data[i].live_before);
|
|
3228
|
|
3229 /* Update life information for the new sequence. */
|
|
3230 x = attempt;
|
|
3231 do
|
|
3232 {
|
|
3233 if (INSN_P (x))
|
|
3234 {
|
|
3235 if (--i < 0)
|
|
3236 i = MAX_INSNS_PER_PEEP2;
|
|
3237 if (peep2_current_count < MAX_INSNS_PER_PEEP2
|
|
3238 && peep2_insn_data[i].insn == NULL_RTX)
|
|
3239 peep2_current_count++;
|
|
3240 peep2_insn_data[i].insn = x;
|
|
3241 df_insn_rescan (x);
|
|
3242 df_simulate_one_insn_backwards (bb, x, live);
|
|
3243 bitmap_copy (peep2_insn_data[i].live_before, live);
|
|
3244 }
|
|
3245 x = PREV_INSN (x);
|
|
3246 }
|
|
3247 while (x != prev);
|
|
3248
|
|
3249 peep2_current = i;
|
|
3250 #endif
|
|
3251
|
|
3252 /* If we generated a jump instruction, it won't have
|
|
3253 JUMP_LABEL set. Recompute after we're done. */
|
|
3254 for (x = attempt; x != before_try; x = PREV_INSN (x))
|
|
3255 if (JUMP_P (x))
|
|
3256 {
|
|
3257 do_rebuild_jump_labels = true;
|
|
3258 break;
|
|
3259 }
|
|
3260 }
|
|
3261 }
|
|
3262
|
|
3263 if (insn == BB_HEAD (bb))
|
|
3264 break;
|
|
3265 }
|
|
3266 }
|
|
3267
|
|
3268 default_rtl_profile ();
|
|
3269 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
|
|
3270 BITMAP_FREE (peep2_insn_data[i].live_before);
|
|
3271 BITMAP_FREE (live);
|
|
3272 if (do_rebuild_jump_labels)
|
|
3273 rebuild_jump_labels (get_insns ());
|
|
3274 }
|
|
3275 #endif /* HAVE_peephole2 */
|
|
3276
|
|
3277 /* Common predicates for use with define_bypass. */
|
|
3278
|
|
3279 /* True if the dependency between OUT_INSN and IN_INSN is on the store
|
|
3280 data not the address operand(s) of the store. IN_INSN and OUT_INSN
|
|
3281 must be either a single_set or a PARALLEL with SETs inside. */
|
|
3282
|
|
3283 int
|
|
3284 store_data_bypass_p (rtx out_insn, rtx in_insn)
|
|
3285 {
|
|
3286 rtx out_set, in_set;
|
|
3287 rtx out_pat, in_pat;
|
|
3288 rtx out_exp, in_exp;
|
|
3289 int i, j;
|
|
3290
|
|
3291 in_set = single_set (in_insn);
|
|
3292 if (in_set)
|
|
3293 {
|
|
3294 if (!MEM_P (SET_DEST (in_set)))
|
|
3295 return false;
|
|
3296
|
|
3297 out_set = single_set (out_insn);
|
|
3298 if (out_set)
|
|
3299 {
|
|
3300 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
|
|
3301 return false;
|
|
3302 }
|
|
3303 else
|
|
3304 {
|
|
3305 out_pat = PATTERN (out_insn);
|
|
3306
|
|
3307 if (GET_CODE (out_pat) != PARALLEL)
|
|
3308 return false;
|
|
3309
|
|
3310 for (i = 0; i < XVECLEN (out_pat, 0); i++)
|
|
3311 {
|
|
3312 out_exp = XVECEXP (out_pat, 0, i);
|
|
3313
|
|
3314 if (GET_CODE (out_exp) == CLOBBER)
|
|
3315 continue;
|
|
3316
|
|
3317 gcc_assert (GET_CODE (out_exp) == SET);
|
|
3318
|
|
3319 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
|
|
3320 return false;
|
|
3321 }
|
|
3322 }
|
|
3323 }
|
|
3324 else
|
|
3325 {
|
|
3326 in_pat = PATTERN (in_insn);
|
|
3327 gcc_assert (GET_CODE (in_pat) == PARALLEL);
|
|
3328
|
|
3329 for (i = 0; i < XVECLEN (in_pat, 0); i++)
|
|
3330 {
|
|
3331 in_exp = XVECEXP (in_pat, 0, i);
|
|
3332
|
|
3333 if (GET_CODE (in_exp) == CLOBBER)
|
|
3334 continue;
|
|
3335
|
|
3336 gcc_assert (GET_CODE (in_exp) == SET);
|
|
3337
|
|
3338 if (!MEM_P (SET_DEST (in_exp)))
|
|
3339 return false;
|
|
3340
|
|
3341 out_set = single_set (out_insn);
|
|
3342 if (out_set)
|
|
3343 {
|
|
3344 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
|
|
3345 return false;
|
|
3346 }
|
|
3347 else
|
|
3348 {
|
|
3349 out_pat = PATTERN (out_insn);
|
|
3350 gcc_assert (GET_CODE (out_pat) == PARALLEL);
|
|
3351
|
|
3352 for (j = 0; j < XVECLEN (out_pat, 0); j++)
|
|
3353 {
|
|
3354 out_exp = XVECEXP (out_pat, 0, j);
|
|
3355
|
|
3356 if (GET_CODE (out_exp) == CLOBBER)
|
|
3357 continue;
|
|
3358
|
|
3359 gcc_assert (GET_CODE (out_exp) == SET);
|
|
3360
|
|
3361 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
|
|
3362 return false;
|
|
3363 }
|
|
3364 }
|
|
3365 }
|
|
3366 }
|
|
3367
|
|
3368 return true;
|
|
3369 }
|
|
3370
|
|
3371 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
|
|
3372 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
|
|
3373 or multiple set; IN_INSN should be single_set for truth, but for convenience
|
|
3374 of insn categorization may be any JUMP or CALL insn. */
|
|
3375
|
|
3376 int
|
|
3377 if_test_bypass_p (rtx out_insn, rtx in_insn)
|
|
3378 {
|
|
3379 rtx out_set, in_set;
|
|
3380
|
|
3381 in_set = single_set (in_insn);
|
|
3382 if (! in_set)
|
|
3383 {
|
|
3384 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
|
|
3385 return false;
|
|
3386 }
|
|
3387
|
|
3388 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
|
|
3389 return false;
|
|
3390 in_set = SET_SRC (in_set);
|
|
3391
|
|
3392 out_set = single_set (out_insn);
|
|
3393 if (out_set)
|
|
3394 {
|
|
3395 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
|
|
3396 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
|
|
3397 return false;
|
|
3398 }
|
|
3399 else
|
|
3400 {
|
|
3401 rtx out_pat;
|
|
3402 int i;
|
|
3403
|
|
3404 out_pat = PATTERN (out_insn);
|
|
3405 gcc_assert (GET_CODE (out_pat) == PARALLEL);
|
|
3406
|
|
3407 for (i = 0; i < XVECLEN (out_pat, 0); i++)
|
|
3408 {
|
|
3409 rtx exp = XVECEXP (out_pat, 0, i);
|
|
3410
|
|
3411 if (GET_CODE (exp) == CLOBBER)
|
|
3412 continue;
|
|
3413
|
|
3414 gcc_assert (GET_CODE (exp) == SET);
|
|
3415
|
|
3416 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
|
|
3417 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
|
|
3418 return false;
|
|
3419 }
|
|
3420 }
|
|
3421
|
|
3422 return true;
|
|
3423 }
|
|
3424
|
|
3425 static bool
|
|
3426 gate_handle_peephole2 (void)
|
|
3427 {
|
|
3428 return (optimize > 0 && flag_peephole2);
|
|
3429 }
|
|
3430
|
|
/* Pass entry point: run the peephole2 optimizer, but only on targets
   that define peephole2 patterns (HAVE_peephole2).  Always returns 0
   (no extra TODO flags).  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
|
|
3439
|
|
/* Pass descriptor for the post-reload peephole2 optimization.  */
struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
|
|
3459
|
|
/* Pass entry point for the unconditional "split1" pass: split every
   insn in the function.  Always returns 0.  */
static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}
|
|
3466
|
|
/* Pass descriptor for "split1" — the first insn-splitting pass;
   ungated, so it always runs.  */
struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
|
|
3485
|
|
/* Pass entry point for "split2": split insns after reload.  On
   STACK_REGS targets the split is done unconditionally, even at -O0
   (presumably so later reg-stack handling sees split insns — see
   gate_handle_split_before_regstack; confirm before relying on it).  */
static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}
|
|
3496
|
|
/* Pass descriptor for "split2" — insn splitting after reload.  */
struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
|
|
3515
|
|
/* Gate for "split3": only relevant on targets with both insn-length
   attributes and stack-like registers.  Returns false (pass skipped)
   everywhere else.  */
static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}
|
|
3533
|
|
/* Pass entry point for "split3": split all insns before the reg-stack
   conversion pass.  Always returns 0.  */
static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}
|
|
3540
|
|
/* Pass descriptor for "split3" — splitting before reg-stack.  */
struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
|
|
3559
|
|
/* Gate for "split4": run only when the second scheduling pass will run
   (insns must be split before post-reload scheduling).  */
static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}
|
|
3569
|
|
/* Pass entry point for "split4": split insns before the second
   scheduling pass.  Always returns 0.  */
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
|
|
3578
|
|
/* Pass descriptor for "split4" — splitting before sched2.  */
struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
|
|
3598
|
|
/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
/* Gate for "split5": the final split, needed only when insn lengths
   exist and the STACK_REGS "split3" path did not already handle it.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}
|
|
3610
|
|
/* Pass descriptor for "split5" — last-chance splitting so that
   shorten_branches sees final insn forms; executes
   split_all_insns_noflow directly.  */
struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};
|
|
3629
|
|
3630
|