Mercurial > hg > CbC > CbC_gcc
annotate gcc/dojump.c @ 36:855418dad1a3
gcc-4.4-20091020
author | e075725 |
---|---|
date | Tue, 22 Dec 2009 21:19:31 +0900 |
parents | 58ad6c70ea60 |
children | 77e2b8dfacca |
rev | line source |
---|---|
0 | 1 /* Convert tree expression to rtl instructions, for GNU compiler. |
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, | |
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | |
4 Free Software Foundation, Inc. | |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify it under | |
9 the terms of the GNU General Public License as published by the Free | |
10 Software Foundation; either version 3, or (at your option) any later | |
11 version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 #include "config.h" | |
23 #include "system.h" | |
24 #include "coretypes.h" | |
25 #include "tm.h" | |
26 #include "rtl.h" | |
27 #include "tree.h" | |
28 #include "flags.h" | |
29 #include "function.h" | |
30 #include "insn-config.h" | |
31 #include "insn-attr.h" | |
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ | |
33 #include "expr.h" | |
34 #include "optabs.h" | |
35 #include "langhooks.h" | |
36 #include "ggc.h" | |
37 #include "basic-block.h" | |
38 | |
39 static bool prefer_and_bit_test (enum machine_mode, int); | |
40 static void do_jump_by_parts_greater (tree, int, rtx, rtx); | |
41 static void do_jump_by_parts_equality (tree, rtx, rtx); | |
42 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx, | |
43 rtx); | |
44 | |
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  /* Reset the global byte count of pushed-but-unpopped arguments.  */
  pending_stack_adjust = 0;
}
53 | |
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  /* Back the tracked stack-pointer offset out first, so the two
     globals stay consistent, then forget the pending adjustment.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
63 | |
64 /* When exiting from function, if safe, clear out any pending stack adjust | |
65 so the adjustment won't get done. | |
66 | |
67 Note, if the current function calls alloca, then it must have a | |
68 frame pointer regardless of the value of flag_omit_frame_pointer. */ | |
69 | |
70 void | |
71 clear_pending_stack_adjust (void) | |
72 { | |
73 if (optimize > 0 | |
74 && (! flag_omit_frame_pointer || cfun->calls_alloca) | |
75 && EXIT_IGNORE_STACK) | |
76 discard_pending_stack_adjust (); | |
77 } | |
78 | |
79 /* Pop any previously-pushed arguments that have not been popped yet. */ | |
80 | |
81 void | |
82 do_pending_stack_adjust (void) | |
83 { | |
84 if (inhibit_defer_pop == 0) | |
85 { | |
86 if (pending_stack_adjust != 0) | |
87 adjust_stack (GEN_INT (pending_stack_adjust)); | |
88 pending_stack_adjust = 0; | |
89 } | |
90 } | |
91 | |
92 /* Expand conditional expressions. */ | |
93 | |
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  /* NULL_RTX as the true-label means "fall through when nonzero".  */
  do_jump (exp, label, NULL_RTX);
}
103 | |
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  /* NULL_RTX as the false-label means "fall through when zero".  */
  do_jump (exp, NULL_RTX, label);
}
111 | |
/* Used internally by prefer_and_bit_test.  These are GC-rooted rtx
   templates built once and then re-used (with their modes and constant
   operands patched in place) for every cost query.  */

static GTY(()) rtx and_reg;	/* Scratch REG operand shared by both tests.  */
static GTY(()) rtx and_test;	/* Template for "reg & (1 << bitnum)".  */
static GTY(()) rtx shift_test;	/* Template for "(reg >> bitnum) & 1".  */
117 | |
/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* First call: set up rtxes for the two variations.  Use NULL as a
	 placeholder for the BITNUM-based constants; they are filled in
	 below on every call.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  immed_double_const is used for the AND mask
     since 1 << BITNUM may need a wider-than-host-int constant.  */
  XEXP (and_test, 1)
    = immed_double_const ((unsigned HOST_WIDE_INT) 1 << bitnum, 0, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Ask the target's cost model which form is cheaper in a branch
     condition context.  */
  return (rtx_cost (and_test, IF_THEN_ELSE, optimize_insn_for_speed_p ())
	  <= rtx_cost (shift_test, IF_THEN_ELSE, optimize_insn_for_speed_p ()));
}
151 | |
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  /* Local label emitted at the end when one of the caller's labels was
     null and we needed a concrete jump target meaning "fall through".  */
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      /* An error was already diagnosed; emit nothing.  */
      break;

    case INTEGER_CST:
      /* Constant condition: the branch direction is known at compile
	 time, so emit at most one unconditional jump.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa, so test the
	 operand directly.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case TRUTH_NOT_EXPR:
      /* Logical negation: recurse with the two targets swapped.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
	/* Jump to LABEL1 for the else-arm; fall through into the
	   then-arm's test.  */
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

	/* If a narrower type covering the field exists and the target
	   can compare in that mode, test the converted value instead.  */
        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	/* Complex comparisons are lowered before this point.  */
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
	  /* x == 0 is !x: recurse with the targets swapped.  */
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
	  /* x != 0 is just the truth value of x.  */
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  /* Note the swapped labels: NE is tested as !EQ by parts.  */
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
	/* a < b tested as b > a (SWAP == 1).  */
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
	/* a <= b tested as !(a > b): labels swapped.  */
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
	/* a >= b tested as !(b > a): operands and labels swapped.  */
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
	  /* Reversed comparison: swap the labels too.  */
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    /* This brace-enclosed group shares RCODE1/TCODE1/TCODE2 between the
       six unordered-comparison cases and the unordered_bcc tail.  */
    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

    case UNLT_EXPR:
      rcode1 = UNLT;
      tcode1 = UNORDERED_EXPR;
      tcode2 = LT_EXPR;
      goto unordered_bcc;
    case UNLE_EXPR:
      rcode1 = UNLE;
      tcode1 = UNORDERED_EXPR;
      tcode2 = LE_EXPR;
      goto unordered_bcc;
    case UNGT_EXPR:
      rcode1 = UNGT;
      tcode1 = UNORDERED_EXPR;
      tcode2 = GT_EXPR;
      goto unordered_bcc;
    case UNGE_EXPR:
      rcode1 = UNGE;
      tcode1 = UNORDERED_EXPR;
      tcode2 = GE_EXPR;
      goto unordered_bcc;
    case UNEQ_EXPR:
      rcode1 = UNEQ;
      tcode1 = UNORDERED_EXPR;
      tcode2 = EQ_EXPR;
      goto unordered_bcc;
    case LTGT_EXPR:
      /* It is ok for LTGT_EXPR to trap when the result is unordered,
         so expand to (a < b) || (a > b).  */
      rcode1 = LTGT;
      tcode1 = LT_EXPR;
      tcode2 = GT_EXPR;
      goto unordered_bcc;

    unordered_bcc:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (can_compare_p (rcode1, mode, ccp_jump))
        do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                             if_true_label);
      else
        {
	  /* save_expr so each operand is evaluated only once even
	     though it appears in both decomposed comparisons.  */
          tree op0 = save_expr (TREE_OPERAND (exp, 0));
          tree op1 = save_expr (TREE_OPERAND (exp, 1));
          tree cmp0, cmp1;

          /* If the target doesn't support combined unordered
             compares, decompose into two comparisons.  */
          if (if_true_label == 0)
            drop_through_label = if_true_label = gen_label_rtx ();

          cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
          cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
          do_jump (cmp0, 0, if_true_label);
          do_jump (cmp1, if_false_label, if_true_label);
        }
      break;
    }

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
		  /* Rebuild the cheaper "arg & (1 << shift)" form.  */
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_wide_type (argtype, mask, 0)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
      /* FALLTHRU */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      /* FALLTHRU */

    case TRUTH_ANDIF_EXPR:
      /* Short-circuit AND: the RHS is only tested when the LHS is true.  */
      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false)>= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      /* FALLTHRU */

    case TRUTH_ORIF_EXPR:
      /* Short-circuit OR: the RHS is only tested when the LHS is false.  */
      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      /* Evaluate the expression and compare the result against zero.  */
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
589 | |
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Substitute a local label for whichever caller label is null, so
     both outcomes always have a concrete jump target.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Pick the subword index that holds the most-significant word
	 not yet compared, given the target's word order.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words compared equal, so OP0 is not greater.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
640 | |
641 /* Given a comparison expression EXP for values too wide to be compared | |
642 with one insn, test the comparison and jump to the appropriate label. | |
643 The code of EXP is ignored; we always test GT if SWAP is 0, | |
644 and LT if SWAP is 1. */ | |
645 | |
646 static void | |
647 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label, | |
648 rtx if_true_label) | |
649 { | |
650 rtx op0 = expand_normal (TREE_OPERAND (exp, swap)); | |
651 rtx op1 = expand_normal (TREE_OPERAND (exp, !swap)); | |
652 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
653 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
654 | |
655 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, | |
656 if_true_label); | |
657 } | |
658 | |
659 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer | |
660 mode, MODE, that is too wide for the available compare insns.  Either | |
661 (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX | |
662 to indicate drop through. */ | |
663 | |
static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  /* expand_binop may return 0 on failure; the loop stops then and the
     word-by-word fallback below is used instead.  */
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word sends control to IF_FALSE_LABEL immediately.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  /* Every word was zero.  */
  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
708 | |
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Comparison against zero has a cheaper specialized path.  */
  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal word pair decides the answer: jump to the false label.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  /* All word pairs compared equal.  */
  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
747 | |
748 /* Given an EQ_EXPR expression EXP for values too wide to be compared | |
749 with one insn, test the comparison and jump to the appropriate label. */ | |
750 | |
751 static void | |
752 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label) | |
753 { | |
754 rtx op0 = expand_normal (TREE_OPERAND (exp, 0)); | |
755 rtx op1 = expand_normal (TREE_OPERAND (exp, 1)); | |
756 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
757 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label, | |
758 if_true_label); | |
759 } | |
760 | |
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   MODE is the machine mode of the comparison, not of the result.
   (including code to compute the values to be compared) and set CC0
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time, or at least into a
     simpler comparison.  */
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
	  /* Simplified into a different comparison: adopt its pieces.  */
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

/* NOTE(review): other GCC code commonly tests this macro with #ifdef
   HAVE_cc0; confirm #if is intended here (it treats "undefined" as 0).  */
#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
818 | |
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  /* Set when we invent IF_TRUE_LABEL ourselves and must emit it at
     the end.  */
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Not safe for IEEE floating point, where reversal can change
     the outcome for unordered operands.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
	  /* Outcome is known at compile time: one unconditional jump
	     (or fall-through) suffices.  */
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      /* Simplified into a different comparison: adopt its pieces.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }


  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  /* Integer mode too wide for a single compare insn: split into
     word-sized comparisons.  Each case maps CODE onto the GT- or
     EQ-based helper by swapping operands and/or labels.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
950 | |
951 /* Generate code for a comparison expression EXP (including code to compute | |
952 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or | |
953 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the | |
954 generated code will drop through. | |
955 SIGNED_CODE should be the rtx operation for this comparison for | |
956 signed data; UNSIGNED_CODE, likewise for use if data is unsigned. | |
957 | |
958 We force a stack adjustment unless there are currently | |
959 things pushed on the stack that aren't yet used. */ | |
960 | |
static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
		     enum rtx_code unsigned_code, rtx if_false_label,
		     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  Note the operand is
     still expanded first, so any side effects it has are preserved.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  /* By default compare in the mode of the first operand's type.  */
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  /* Signedness of the chosen type decides which rtx comparison code the
     caller's pair (SIGNED_CODE / UNSIGNED_CODE) we actually emit.  */
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	   == FUNCTION_TYPE)
    {
      /* Run each operand through the target's canonicalization pattern,
	 leaving the results in fresh pseudo registers.  */
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* BLKmode objects have no mode-implied size, so pass their size rtx
     explicitly; for all other modes no size is needed.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
1027 | |
1028 #include "gt-dojump.h" |