comparison gcc/fold-const.c @ 63:b7f97abdc517 gcc-4.6-20100522

update gcc from gcc-4.5.0 to gcc-4.6
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 24 May 2010 12:47:05 +0900
parents 77e2b8dfacca
children f6334be47118
56:3c8a44c06a95 63:b7f97abdc517
1 /* Fold a constant sub-tree into a single node for C-compiler 1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc. 4 Free Software Foundation, Inc.
5 5
6 This file is part of GCC. 6 This file is part of GCC.
7 7
8 GCC is free software; you can redistribute it and/or modify it under 8 GCC is free software; you can redistribute it and/or modify it under
51 #include "system.h" 51 #include "system.h"
52 #include "coretypes.h" 52 #include "coretypes.h"
53 #include "tm.h" 53 #include "tm.h"
54 #include "flags.h" 54 #include "flags.h"
55 #include "tree.h" 55 #include "tree.h"
56 #include "real.h" 56 #include "realmpfr.h"
57 #include "fixed-value.h"
58 #include "rtl.h" 57 #include "rtl.h"
59 #include "expr.h" 58 #include "expr.h"
60 #include "tm_p.h" 59 #include "tm_p.h"
61 #include "target.h" 60 #include "target.h"
62 #include "toplev.h" 61 #include "toplev.h"
91 COMPCODE_NE = 13, 90 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14, 91 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15 92 COMPCODE_TRUE = 15
94 }; 93 };
95 94
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function); 95 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree); 96 static bool negate_expr_p (tree);
100 static tree negate_expr (tree); 97 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); 98 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree); 99 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
157 Overflow occurs if A and B have the same sign, but A and SUM differ in 154 Overflow occurs if A and B have the same sign, but A and SUM differ in
158 sign. Use `^' to test whether signs differ, and `< 0' to isolate the 155 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
159 sign. */ 156 sign. */
160 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0) 157 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
161 158
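The sign trick behind OVERFLOW_SUM_SIGN can be checked with a stand-alone program. This is a minimal sketch that assumes int64_t in place of HOST_WIDE_INT (the real host width varies); the wrapped sum is computed through unsigned arithmetic so the test itself has no signed-overflow undefined behavior:

#include <stdio.h>
#include <stdint.h>

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

int main (void)
{
  int64_t a = INT64_MAX, b = 1;
  /* Compute the wrapped sum via unsigned arithmetic.  */
  int64_t sum = (int64_t) ((uint64_t) a + (uint64_t) b);
  printf ("%d\n", OVERFLOW_SUM_SIGN (a, b, sum));            /* 1: overflow */
  printf ("%d\n", OVERFLOW_SUM_SIGN (a, (int64_t) -1,
                                     (int64_t) (a - 1)));    /* 0: no overflow */
  return 0;
}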
162 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
163 We do that by representing the two-word integer in 4 words, with only
164 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
165 number. The value of each original word is LOWPART + HIGHPART * BASE. */
166
167 #define LOWPART(x) \
168 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
169 #define HIGHPART(x) \
170 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
171 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
172
173 /* Unpack a two-word integer into 4 words.
174 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175 WORDS points to the array of HOST_WIDE_INTs. */
176
177 static void
178 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
179 {
180 words[0] = LOWPART (low);
181 words[1] = HIGHPART (low);
182 words[2] = LOWPART (hi);
183 words[3] = HIGHPART (hi);
184 }
185
186 /* Pack an array of 4 words into a two-word integer.
187 WORDS points to the array of words.
188 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189
190 static void
191 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 HOST_WIDE_INT *hi)
193 {
194 *low = words[0] + words[1] * BASE;
195 *hi = words[2] + words[3] * BASE;
196 }
197
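A stand-alone round trip through the same representation, assuming a 32-bit HOST_WIDE_INT for illustration so that each of the four digits carries 16 bits:

#include <stdio.h>
#include <stdint.h>

#define BASE        ((uint32_t) 1 << 16)
#define LOWPART(x)  ((x) & (BASE - 1))
#define HIGHPART(x) ((uint32_t) (x) >> 16)

int main (void)
{
  uint32_t low = 0xdeadbeef, hi = 0x1234, words[4];

  /* encode: unpack the two words into four half-width digits.  */
  words[0] = LOWPART (low);  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);   words[3] = HIGHPART (hi);

  /* decode: each original word is LOWPART + HIGHPART * BASE.  */
  printf ("%x %x\n", words[0] + words[1] * BASE,
          words[2] + words[3] * BASE);   /* prints deadbeef 1234 */
  return 0;
}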
198 /* Force the double-word integer L1, H1 to be within the range of the
199 integer type TYPE. Stores the properly truncated and sign-extended
200 double-word integer in *LV, *HV. Returns true if the operation
201 overflows, that is, argument and result are different. */
202
203 int
204 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
205 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
206 {
207 unsigned HOST_WIDE_INT low0 = l1;
208 HOST_WIDE_INT high0 = h1;
209 unsigned int prec = TYPE_PRECISION (type);
210 int sign_extended_type;
211
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
216
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 ;
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
222 else
223 {
224 h1 = 0;
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
227 }
228
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 {
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 }
241 else if (prec == HOST_BITS_PER_WIDE_INT)
242 {
243 if ((HOST_WIDE_INT)l1 < 0)
244 h1 = -1;
245 }
246 else
247 {
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
250 {
251 h1 = -1;
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
253 }
254 }
255
256 *lv = l1;
257 *hv = h1;
258
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
261 }
262
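The truncate-then-sign-extend sequence is easier to follow on a single word. A minimal sketch, assuming 0 < prec < 64 and a two's-complement host (fit_prec is a hypothetical stand-in, not the GCC function):

#include <stdio.h>
#include <stdint.h>

static int64_t fit_prec (int64_t v, unsigned prec, int *ovf)
{
  int64_t orig = v;
  v &= ~((uint64_t) -1 << prec);             /* clear bits beyond prec */
  if (v & ((int64_t) 1 << (prec - 1)))       /* sign bit of the field set? */
    v |= (int64_t) ((uint64_t) -1 << prec);  /* sign extend */
  *ovf = v != orig;                          /* overflow: the value changed */
  return v;
}

int main (void)
{
  int ovf;
  /* 300 truncated to 8 signed bits: the low byte is 0x2C = 44, and the
     value changed, so overflow is signaled.  */
  printf ("%lld %d\n", (long long) fit_prec (300, 8, &ovf), ovf);  /* 44 1 */
  return 0;
}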
263 /* We force the double-int HIGH:LOW to the range of the type TYPE
264 by sign or zero extending it.
265 OVERFLOWABLE indicates whether we are interested in overflow
266 of the value: when >0 we are only interested in signed overflow,
267 and when <0 we are interested in any overflow. OVERFLOWED
268 indicates whether overflow has already occurred.
269 We force the value to be within the range of TYPE
270 (by setting to 0 or 1 all the bits outside the type's range).
271 We set TREE_OVERFLOW if
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs,
274 or OVERFLOWABLE is <0 and any overflow occurs.
275 We return a new tree node for the extended double-int. The node
276 is shared if no overflow flags are set. */
277
278 tree
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
281 bool overflowed)
282 {
283 int sign_extended_type;
284 bool overflow;
285
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
290
291 overflow = fit_double_type (low, high, &low, &high, type);
292
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
295 {
296 if (overflowed
297 || overflowable < 0
298 || (overflowable > 0 && sign_extended_type))
299 {
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
305 return t;
306 }
307 }
308
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
311 }
312
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318
319 int
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
323 bool unsigned_p)
324 {
325 unsigned HOST_WIDE_INT l;
326 HOST_WIDE_INT h;
327
328 l = l1 + l2;
329 h = h1 + h2 + (l < l1);
330
331 *lv = l;
332 *hv = h;
333
334 if (unsigned_p)
335 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 else
337 return OVERFLOW_SUM_SIGN (h1, h2, h);
338 }
339
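The carry computation h1 + h2 + (l < l1) relies on unsigned wraparound: the low-word sum wrapped exactly when it is smaller than one of its addends. A minimal sketch, again assuming 64-bit pieces:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  uint64_t l1 = UINT64_MAX, l2 = 1;
  int64_t h1 = 0, h2 = 0;

  uint64_t l = l1 + l2;            /* wraps to 0 */
  int64_t h = h1 + h2 + (l < l1);  /* the wrap shows up as a carry of 1 */
  printf ("%lld:%llu\n", (long long) h, (unsigned long long) l);  /* 1:0 */
  return 0;
}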
340 /* Negate a doubleword integer with doubleword result.
341 Return nonzero if the operation overflows, assuming it's signed.
342 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
343 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344
345 int
346 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
347 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 {
349 if (l1 == 0)
350 {
351 *lv = 0;
352 *hv = - h1;
353 return (*hv & h1) < 0;
354 }
355 else
356 {
357 *lv = -l1;
358 *hv = ~h1;
359 return 0;
360 }
361 }
362
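The (*hv & h1) < 0 test works because the most negative value is the one value whose negation overflows, and it is also the only case where a word and its negation are both negative. A single-word sketch, assuming a two's-complement int64_t host:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  int64_t h1 = INT64_MIN;
  int64_t hv = (int64_t) - (uint64_t) h1;  /* wraps back to INT64_MIN */
  printf ("%d\n", (hv & h1) < 0);          /* prints 1: overflow */
  return 0;
}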
363 /* Multiply two doubleword integers with doubleword result.
364 Return nonzero if the operation overflows according to UNSIGNED_P.
365 Each argument is given as two `HOST_WIDE_INT' pieces.
366 One argument is L1 and H1; the other, L2 and H2.
367 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
368
369 int
370 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
371 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
372 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
373 bool unsigned_p)
374 {
375 HOST_WIDE_INT arg1[4];
376 HOST_WIDE_INT arg2[4];
377 HOST_WIDE_INT prod[4 * 2];
378 unsigned HOST_WIDE_INT carry;
379 int i, j, k;
380 unsigned HOST_WIDE_INT toplow, neglow;
381 HOST_WIDE_INT tophigh, neghigh;
382
383 encode (arg1, l1, h1);
384 encode (arg2, l2, h2);
385
386 memset (prod, 0, sizeof prod);
387
388 for (i = 0; i < 4; i++)
389 {
390 carry = 0;
391 for (j = 0; j < 4; j++)
392 {
393 k = i + j;
394 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
395 carry += arg1[i] * arg2[j];
396 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 carry += prod[k];
398 prod[k] = LOWPART (carry);
399 carry = HIGHPART (carry);
400 }
401 prod[i + 4] = carry;
402 }
403
404 decode (prod, lv, hv);
405 decode (prod + 4, &toplow, &tophigh);
406
407 /* Unsigned overflow is immediate. */
408 if (unsigned_p)
409 return (toplow | tophigh) != 0;
410
411 /* Check for signed overflow by calculating the signed representation of the
412 top half of the result; it should agree with the low half's sign bit. */
413 if (h1 < 0)
414 {
415 neg_double (l2, h2, &neglow, &neghigh);
416 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
417 }
418 if (h2 < 0)
419 {
420 neg_double (l1, h1, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 }
423 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
424 }
425
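The bounds quoted in the inner-loop comments can be verified with one worked step at 16-bit digit width: the partial product plus the incoming carry plus the existing digit never exceeds 0xFFFFFFFF, so the 32-bit accumulator cannot wrap. A minimal check:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  uint32_t d1 = 0xffff, d2 = 0xffff;        /* largest possible digits */
  uint32_t carry = 0xffff, prod = 0xffff;
  /* 0xffff * 0xffff = 0xFFFE0001; + carry = 0xFFFF0000; + prod = 0xFFFFFFFF. */
  uint32_t work = d1 * d2 + carry + prod;
  printf ("%08x\n", work);                  /* prints ffffffff: no wrap */
  return 0;
}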
426 /* Shift the doubleword integer in L1, H1 left by COUNT places
427 keeping only PREC bits of result.
428 Shift right if COUNT is negative.
429 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
431
432 void
433 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
434 HOST_WIDE_INT count, unsigned int prec,
435 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 {
437 unsigned HOST_WIDE_INT signmask;
438
439 if (count < 0)
440 {
441 rshift_double (l1, h1, -count, prec, lv, hv, arith);
442 return;
443 }
444
445 if (SHIFT_COUNT_TRUNCATED)
446 count %= prec;
447
448 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 {
450 /* Shifting by the host word size is undefined according to the
451 ANSI standard, so we must handle this as a special case. */
452 *hv = 0;
453 *lv = 0;
454 }
455 else if (count >= HOST_BITS_PER_WIDE_INT)
456 {
457 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
458 *lv = 0;
459 }
460 else
461 {
462 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
463 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
464 *lv = l1 << count;
465 }
466
467 /* Sign extend all bits that are beyond the precision. */
468
469 signmask = -((prec > HOST_BITS_PER_WIDE_INT
470 ? ((unsigned HOST_WIDE_INT) *hv
471 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
472 : (*lv >> (prec - 1))) & 1);
473
474 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 ;
476 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 {
478 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
479 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
480 }
481 else
482 {
483 *hv = signmask;
484 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
485 *lv |= signmask << prec;
486 }
487 }
488
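The split shift in the final else-branch is what avoids undefined behavior: with COUNT == 0 a single shift by (width - COUNT) would shift by the full word width, while shifting by width - 1 and then by 1 is always well defined. A sketch assuming a 64-bit word:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  uint64_t l1 = 0x8000000000000000ULL;
  int count = 0;
  /* (l1 >> 64) would be undefined; the two-step form yields 0.  */
  uint64_t spill = l1 >> (64 - count - 1) >> 1;
  printf ("%llu\n", (unsigned long long) spill);  /* prints 0 */
  return 0;
}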
489 /* Shift the doubleword integer in L1, H1 right by COUNT places
490 keeping only PREC bits of result. COUNT must be positive.
491 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
492 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
493
494 void
495 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
496 HOST_WIDE_INT count, unsigned int prec,
497 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
498 int arith)
499 {
500 unsigned HOST_WIDE_INT signmask;
501
502 signmask = (arith
503 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
504 : 0);
505
506 if (SHIFT_COUNT_TRUNCATED)
507 count %= prec;
508
509 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 {
511 /* Shifting by the host word size is undefined according to the
512 ANSI standard, so we must handle this as a special case. */
513 *hv = 0;
514 *lv = 0;
515 }
516 else if (count >= HOST_BITS_PER_WIDE_INT)
517 {
518 *hv = 0;
519 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
520 }
521 else
522 {
523 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 *lv = ((l1 >> count)
525 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
526 }
527
528 /* Zero / sign extend all bits that are beyond the precision. */
529
530 if (count >= (HOST_WIDE_INT)prec)
531 {
532 *hv = signmask;
533 *lv = signmask;
534 }
535 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 ;
537 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 {
539 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
540 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
541 }
542 else
543 {
544 *hv = signmask;
545 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
546 *lv |= signmask << (prec - count);
547 }
548 }
549
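The signmask computation broadcasts the sign bit: negating the extracted top bit yields 0 for a non-negative word and all ones for a negative one. A minimal sketch at 64-bit width:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  int64_t h1 = -5;
  /* Top bit is 1, so -1 in unsigned arithmetic gives all ones.  */
  uint64_t signmask = -((uint64_t) h1 >> 63);
  printf ("%016llx\n", (unsigned long long) signmask);  /* ffffffffffffffff */
  return 0;
}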
550 /* Rotate the doubleword integer in L1, H1 left by COUNT places
551 keeping only PREC bits of result.
552 Rotate right if COUNT is negative.
553 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
554
555 void
556 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
557 HOST_WIDE_INT count, unsigned int prec,
558 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 {
560 unsigned HOST_WIDE_INT s1l, s2l;
561 HOST_WIDE_INT s1h, s2h;
562
563 count %= prec;
564 if (count < 0)
565 count += prec;
566
567 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
568 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
569 *lv = s1l | s2l;
570 *hv = s1h | s2h;
571 }
572
573 /* Rotate the doubleword integer in L1, H1 right by COUNT places
574 keeping only PREC bits of result. COUNT must be positive.
575 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
576
577 void
578 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
579 HOST_WIDE_INT count, unsigned int prec,
580 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 {
582 unsigned HOST_WIDE_INT s1l, s2l;
583 HOST_WIDE_INT s1h, s2h;
584
585 count %= prec;
586 if (count < 0)
587 count += prec;
588
589 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
590 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
591 *lv = s1l | s2l;
592 *hv = s1h | s2h;
593 }
594
595 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
596 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
597 CODE is a tree code for a kind of division, one of
598 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
599 or EXACT_DIV_EXPR.
600 It controls how the quotient is rounded to an integer.
601 Return nonzero if the operation overflows.
602 UNS nonzero says do unsigned division. */
603
604 int
605 div_and_round_double (enum tree_code code, int uns,
606 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
607 HOST_WIDE_INT hnum_orig,
608 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
609 HOST_WIDE_INT hden_orig,
610 unsigned HOST_WIDE_INT *lquo,
611 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
612 HOST_WIDE_INT *hrem)
613 {
614 int quo_neg = 0;
615 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
616 HOST_WIDE_INT den[4], quo[4];
617 int i, j;
618 unsigned HOST_WIDE_INT work;
619 unsigned HOST_WIDE_INT carry = 0;
620 unsigned HOST_WIDE_INT lnum = lnum_orig;
621 HOST_WIDE_INT hnum = hnum_orig;
622 unsigned HOST_WIDE_INT lden = lden_orig;
623 HOST_WIDE_INT hden = hden_orig;
624 int overflow = 0;
625
626 if (hden == 0 && lden == 0)
627 overflow = 1, lden = 1;
628
629 /* Calculate quotient sign and convert operands to unsigned. */
630 if (!uns)
631 {
632 if (hnum < 0)
633 {
634 quo_neg = ~ quo_neg;
635 /* (minimum integer) / (-1) is the only overflow case. */
636 if (neg_double (lnum, hnum, &lnum, &hnum)
637 && ((HOST_WIDE_INT) lden & hden) == -1)
638 overflow = 1;
639 }
640 if (hden < 0)
641 {
642 quo_neg = ~ quo_neg;
643 neg_double (lden, hden, &lden, &hden);
644 }
645 }
646
647 if (hnum == 0 && hden == 0)
648 { /* single precision */
649 *hquo = *hrem = 0;
650 /* This unsigned division rounds toward zero. */
651 *lquo = lnum / lden;
652 goto finish_up;
653 }
654
655 if (hnum == 0)
656 { /* trivial case: dividend < divisor */
657 /* hden != 0 already checked. */
658 *hquo = *lquo = 0;
659 *hrem = hnum;
660 *lrem = lnum;
661 goto finish_up;
662 }
663
664 memset (quo, 0, sizeof quo);
665
666 memset (num, 0, sizeof num); /* to zero the extra element used for scaling */
667 memset (den, 0, sizeof den);
668
669 encode (num, lnum, hnum);
670 encode (den, lden, hden);
671
672 /* Special code for when the divisor < BASE. */
673 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 {
675 /* hnum != 0 already checked. */
676 for (i = 4 - 1; i >= 0; i--)
677 {
678 work = num[i] + carry * BASE;
679 quo[i] = work / lden;
680 carry = work % lden;
681 }
682 }
683 else
684 {
685 /* Full double precision division,
686 with thanks to Don Knuth's "Seminumerical Algorithms". */
687 int num_hi_sig, den_hi_sig;
688 unsigned HOST_WIDE_INT quo_est, scale;
689
690 /* Find the highest nonzero divisor digit. */
691 for (i = 4 - 1;; i--)
692 if (den[i] != 0)
693 {
694 den_hi_sig = i;
695 break;
696 }
697
698 /* Ensure that the first digit of the divisor is at least BASE/2.
699 This is required by the quotient digit estimation algorithm. */
700
701 scale = BASE / (den[den_hi_sig] + 1);
702 if (scale > 1)
703 { /* scale divisor and dividend */
704 carry = 0;
705 for (i = 0; i <= 4 - 1; i++)
706 {
707 work = (num[i] * scale) + carry;
708 num[i] = LOWPART (work);
709 carry = HIGHPART (work);
710 }
711
712 num[4] = carry;
713 carry = 0;
714 for (i = 0; i <= 4 - 1; i++)
715 {
716 work = (den[i] * scale) + carry;
717 den[i] = LOWPART (work);
718 carry = HIGHPART (work);
719 if (den[i] != 0) den_hi_sig = i;
720 }
721 }
722
723 num_hi_sig = 4;
724
725 /* Main loop */
726 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 {
728 /* Guess the next quotient digit, quo_est, by dividing the first
729 two remaining dividend digits by the high order divisor digit.
730 quo_est is never low and is at most 2 high. */
731 unsigned HOST_WIDE_INT tmp;
732
733 num_hi_sig = i + den_hi_sig + 1;
734 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
735 if (num[num_hi_sig] != den[den_hi_sig])
736 quo_est = work / den[den_hi_sig];
737 else
738 quo_est = BASE - 1;
739
740 /* Refine quo_est so it's usually correct, and at most one high. */
741 tmp = work - quo_est * den[den_hi_sig];
742 if (tmp < BASE
743 && (den[den_hi_sig - 1] * quo_est
744 > (tmp * BASE + num[num_hi_sig - 2])))
745 quo_est--;
746
747 /* Try QUO_EST as the quotient digit, by multiplying the
748 divisor by QUO_EST and subtracting from the remaining dividend.
749 Keep in mind that QUO_EST is the I - 1st digit. */
750
751 carry = 0;
752 for (j = 0; j <= den_hi_sig; j++)
753 {
754 work = quo_est * den[j] + carry;
755 carry = HIGHPART (work);
756 work = num[i + j] - LOWPART (work);
757 num[i + j] = LOWPART (work);
758 carry += HIGHPART (work) != 0;
759 }
760
761 /* If quo_est was high by one, then num[i] went negative and
762 we need to correct things. */
763 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
764 {
765 quo_est--;
766 carry = 0; /* add divisor back in */
767 for (j = 0; j <= den_hi_sig; j++)
768 {
769 work = num[i + j] + den[j] + carry;
770 carry = HIGHPART (work);
771 num[i + j] = LOWPART (work);
772 }
773
774 num [num_hi_sig] += carry;
775 }
776
777 /* Store the quotient digit. */
778 quo[i] = quo_est;
779 }
780 }
781
782 decode (quo, lquo, hquo);
783
784 finish_up:
785 /* If result is negative, make it so. */
786 if (quo_neg)
787 neg_double (*lquo, *hquo, lquo, hquo);
788
789 /* Compute trial remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
793
794 switch (code)
795 {
796 case TRUNC_DIV_EXPR:
797 case TRUNC_MOD_EXPR: /* round toward zero */
798 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
799 return overflow;
800
801 case FLOOR_DIV_EXPR:
802 case FLOOR_MOD_EXPR: /* round toward negative infinity */
803 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
804 {
805 /* quo = quo - 1; */
806 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
807 lquo, hquo);
808 }
809 else
810 return overflow;
811 break;
812
813 case CEIL_DIV_EXPR:
814 case CEIL_MOD_EXPR: /* round toward positive infinity */
815 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 {
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
818 lquo, hquo);
819 }
820 else
821 return overflow;
822 break;
823
824 case ROUND_DIV_EXPR:
825 case ROUND_MOD_EXPR: /* round to closest integer */
826 {
827 unsigned HOST_WIDE_INT labs_rem = *lrem;
828 HOST_WIDE_INT habs_rem = *hrem;
829 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
830 HOST_WIDE_INT habs_den = hden, htwice;
831
832 /* Get absolute values. */
833 if (*hrem < 0)
834 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 if (hden < 0)
836 neg_double (lden, hden, &labs_den, &habs_den);
837
838 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
839 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
840 labs_rem, habs_rem, &ltwice, &htwice);
841
842 if (((unsigned HOST_WIDE_INT) habs_den
843 < (unsigned HOST_WIDE_INT) htwice)
844 || (((unsigned HOST_WIDE_INT) habs_den
845 == (unsigned HOST_WIDE_INT) htwice)
846 && (labs_den <= ltwice)))
847 {
848 if (*hquo < 0)
849 /* quo = quo - 1; */
850 add_double (*lquo, *hquo,
851 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
852 else
853 /* quo = quo + 1; */
854 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
855 lquo, hquo);
856 }
857 else
858 return overflow;
859 }
860 break;
861
862 default:
863 gcc_unreachable ();
864 }
865
866 /* Compute true remainder: rem = num - (quo * den) */
867 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
868 neg_double (*lrem, *hrem, lrem, hrem);
869 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
870 return overflow;
871 }
872
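The post-division fixups for the different tree codes can be traced on one worked example. For -7 / 2 the truncated quotient is -3 with remainder -1; the sketch below mirrors the adjustments above for this sign combination (a hypothetical single-word walk-through, not the doubleword code):

#include <stdio.h>
#include <stdlib.h>

int main (void)
{
  int num = -7, den = 2;
  int quo = num / den;                  /* TRUNC: C division truncates, -3 */
  int rem = num - quo * den;            /* -1 */
  int floor_q = quo - (rem != 0 && (num < 0) != (den < 0));  /* FLOOR: -4 */
  int ceil_q  = quo + (rem != 0 && (num < 0) == (den < 0));  /* CEIL:  -3 */
  /* ROUND adjusts when 2 * |rem| >= |den|, away from zero.  */
  int round_q = quo + (2 * abs (rem) >= abs (den)
                       ? (quo < 0 ? -1 : 1) : 0);            /* ROUND: -4 */
  printf ("%d %d %d %d\n", quo, floor_q, ceil_q, round_q);
  return 0;
}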
873 /* If ARG2 divides ARG1 with zero remainder, carries out the division 159 /* If ARG2 divides ARG1 with zero remainder, carries out the division
874 of type CODE and returns the quotient. 160 of type CODE and returns the quotient.
875 Otherwise returns NULL_TREE. */ 161 Otherwise returns NULL_TREE. */
876 162
877 tree 163 tree
878 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) 164 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 { 165 {
880 unsigned HOST_WIDE_INT int1l, int2l; 166 double_int quo, rem;
881 HOST_WIDE_INT int1h, int2h;
882 unsigned HOST_WIDE_INT quol, reml;
883 HOST_WIDE_INT quoh, remh;
884 int uns; 167 int uns;
885 168
886 /* The sign of the division is taken from operand two; that 169 /* The sign of the division is taken from operand two; that
887 does the correct thing for POINTER_PLUS_EXPR, where we want 170 does the correct thing for POINTER_PLUS_EXPR, where we want
888 a signed division. */ 171 a signed division. */
889 uns = TYPE_UNSIGNED (TREE_TYPE (arg2)); 172 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
890 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE 173 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
891 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2))) 174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
892 uns = false; 175 uns = false;
893 176
894 int1l = TREE_INT_CST_LOW (arg1); 177 quo = double_int_divmod (tree_to_double_int (arg1),
895 int1h = TREE_INT_CST_HIGH (arg1); 178 tree_to_double_int (arg2),
896 int2l = TREE_INT_CST_LOW (arg2); 179 uns, code, &rem);
897 int2h = TREE_INT_CST_HIGH (arg2); 180
898 181 if (double_int_zero_p (rem))
899 div_and_round_double (code, uns, int1l, int1h, int2l, int2h, 182 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
900 &quol, &quoh, &reml, &remh); 183
901 if (remh != 0 || reml != 0) 184 return NULL_TREE;
902 return NULL_TREE;
903
904 return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
905 } 185 }
906 186
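The double_int-based body keeps the same contract as before: return the quotient only when the division is exact, otherwise NULL_TREE. A plain-int sketch of that contract (div_exact here is a hypothetical stand-in, not the GCC function):

#include <stdio.h>

static int div_exact (int a, int b, int *quo)
{
  if (a % b != 0)
    return 0;          /* stands in for returning NULL_TREE */
  *quo = a / b;
  return 1;
}

int main (void)
{
  int q;
  printf ("%d\n", div_exact (12, 4, &q) ? q : -1);  /* 3  */
  printf ("%d\n", div_exact (12, 5, &q) ? q : -1);  /* -1 */
  return 0;
}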
907 /* This is nonzero if we should defer warnings about undefined 187 /* This is nonzero if we should defer warnings about undefined
908 overflow. This facility exists because these warnings are a 188 overflow. This facility exists because these warnings are a
909 special case. The code to estimate loop iterations does not want 189 special case. The code to estimate loop iterations does not want
1123 case BIT_NOT_EXPR: 403 case BIT_NOT_EXPR:
1124 return (INTEGRAL_TYPE_P (type) 404 return (INTEGRAL_TYPE_P (type)
1125 && TYPE_OVERFLOW_WRAPS (type)); 405 && TYPE_OVERFLOW_WRAPS (type));
1126 406
1127 case FIXED_CST: 407 case FIXED_CST:
1128 case REAL_CST:
1129 case NEGATE_EXPR: 408 case NEGATE_EXPR:
1130 return true; 409 return true;
410
411 case REAL_CST:
412 /* We want to canonicalize to positive real constants. Pretend
413 that only negative ones can be easily negated. */
414 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
1131 415
1132 case COMPLEX_CST: 416 case COMPLEX_CST:
1133 return negate_expr_p (TREE_REALPART (t)) 417 return negate_expr_p (TREE_REALPART (t))
1134 && negate_expr_p (TREE_IMAGPART (t)); 418 && negate_expr_p (TREE_IMAGPART (t));
1135 419
2242 1526
2243 /* Given an integer constant, make a new constant with the new type, 1527 /* Given an integer constant, make a new constant with the new type,
2244 appropriately sign-extended or truncated. */ 1528 appropriately sign-extended or truncated. */
2245 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1), 1529 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2246 TREE_INT_CST_HIGH (arg1), 1530 TREE_INT_CST_HIGH (arg1),
2247 /* Don't set the overflow when 1531 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2248 converting from a pointer, */
2249 !POINTER_TYPE_P (TREE_TYPE (arg1))
2250 /* or to a sizetype with same signedness
2251 and the precision is unchanged.
2252 ??? sizetype is always sign-extended,
2253 but its signedness depends on the
2254 frontend. Thus we see spurious overflows
2255 here if we do not check this. */
2256 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2257 == TYPE_PRECISION (type))
2258 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2259 == TYPE_UNSIGNED (type))
2260 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2261 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2262 || (TREE_CODE (type) == INTEGER_TYPE
2263 && TYPE_IS_SIZETYPE (type)))),
2264 (TREE_INT_CST_HIGH (arg1) < 0 1532 (TREE_INT_CST_HIGH (arg1) < 0
2265 && (TYPE_UNSIGNED (type) 1533 && (TYPE_UNSIGNED (type)
2266 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 1534 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2267 | TREE_OVERFLOW (arg1)); 1535 | TREE_OVERFLOW (arg1));
2268 1536
2285 INT_MAX are mapped to INT_MAX, and values less than INT_MIN 1553 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2286 are mapped to INT_MIN. These semantics are allowed by the 1554 are mapped to INT_MIN. These semantics are allowed by the
2287 C and C++ standards that simply state that the behavior of 1555 C and C++ standards that simply state that the behavior of
2288 FP-to-integer conversion is unspecified upon overflow. */ 1556 FP-to-integer conversion is unspecified upon overflow. */
2289 1557
2290 HOST_WIDE_INT high, low; 1558 double_int val;
2291 REAL_VALUE_TYPE r; 1559 REAL_VALUE_TYPE r;
2292 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); 1560 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2293 1561
2294 switch (code) 1562 switch (code)
2295 { 1563 {
2303 1571
2304 /* If R is NaN, return zero and show we have an overflow. */ 1572 /* If R is NaN, return zero and show we have an overflow. */
2305 if (REAL_VALUE_ISNAN (r)) 1573 if (REAL_VALUE_ISNAN (r))
2306 { 1574 {
2307 overflow = 1; 1575 overflow = 1;
2308 high = 0; 1576 val = double_int_zero;
2309 low = 0;
2310 } 1577 }
2311 1578
2312 /* See if R is less than the lower bound or greater than the 1579 /* See if R is less than the lower bound or greater than the
2313 upper bound. */ 1580 upper bound. */
2314 1581
2317 tree lt = TYPE_MIN_VALUE (type); 1584 tree lt = TYPE_MIN_VALUE (type);
2318 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); 1585 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2319 if (REAL_VALUES_LESS (r, l)) 1586 if (REAL_VALUES_LESS (r, l))
2320 { 1587 {
2321 overflow = 1; 1588 overflow = 1;
2322 high = TREE_INT_CST_HIGH (lt); 1589 val = tree_to_double_int (lt);
2323 low = TREE_INT_CST_LOW (lt);
2324 } 1590 }
2325 } 1591 }
2326 1592
2327 if (! overflow) 1593 if (! overflow)
2328 { 1594 {
2331 { 1597 {
2332 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); 1598 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2333 if (REAL_VALUES_LESS (u, r)) 1599 if (REAL_VALUES_LESS (u, r))
2334 { 1600 {
2335 overflow = 1; 1601 overflow = 1;
2336 high = TREE_INT_CST_HIGH (ut); 1602 val = tree_to_double_int (ut);
2337 low = TREE_INT_CST_LOW (ut);
2338 } 1603 }
2339 } 1604 }
2340 } 1605 }
2341 1606
2342 if (! overflow) 1607 if (! overflow)
2343 REAL_VALUE_TO_INT (&low, &high, r); 1608 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
2344 1609
2345 t = force_fit_type_double (type, low, high, -1, 1610 t = force_fit_type_double (type, val.low, val.high, -1,
2346 overflow | TREE_OVERFLOW (arg1)); 1611 overflow | TREE_OVERFLOW (arg1));
2347 return t; 1612 return t;
2348 } 1613 }
2349 1614
2350 /* A subroutine of fold_convert_const handling conversions of a 1615 /* A subroutine of fold_convert_const handling conversions of a
2360 /* Right shift FIXED_CST to temp by fbit. */ 1625 /* Right shift FIXED_CST to temp by fbit. */
2361 temp = TREE_FIXED_CST (arg1).data; 1626 temp = TREE_FIXED_CST (arg1).data;
2362 mode = TREE_FIXED_CST (arg1).mode; 1627 mode = TREE_FIXED_CST (arg1).mode;
2363 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT) 1628 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2364 { 1629 {
2365 lshift_double (temp.low, temp.high, 1630 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
2366 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, 1631 HOST_BITS_PER_DOUBLE_INT,
2367 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode)); 1632 SIGNED_FIXED_POINT_MODE_P (mode));
2368 1633
2369 /* Left shift temp to temp_trunc by fbit. */ 1634 /* Left shift temp to temp_trunc by fbit. */
2370 lshift_double (temp.low, temp.high, 1635 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
2371 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, 1636 HOST_BITS_PER_DOUBLE_INT,
2372 &temp_trunc.low, &temp_trunc.high, 1637 SIGNED_FIXED_POINT_MODE_P (mode));
2373 SIGNED_FIXED_POINT_MODE_P (mode));
2374 } 1638 }
2375 else 1639 else
2376 { 1640 {
2377 temp.low = 0; 1641 temp = double_int_zero;
2378 temp.high = 0; 1642 temp_trunc = double_int_zero;
2379 temp_trunc.low = 0;
2380 temp_trunc.high = 0;
2381 } 1643 }
2382 1644
2383 /* If FIXED_CST is negative, we need to round the value toward 0. 1645 /* If FIXED_CST is negative, we need to round the value toward 0.
2384 We do that by adding 1 to temp when the fractional bits are not all zero. */ 1646 We do that by adding 1 to temp when the fractional bits are not all zero. */
2385 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0 1647 if (SIGNED_FIXED_POINT_MODE_P (mode)
1648 && double_int_negative_p (temp_trunc)
2386 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc)) 1649 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2387 { 1650 temp = double_int_add (temp, double_int_one);
2388 double_int one;
2389 one.low = 1;
2390 one.high = 0;
2391 temp = double_int_add (temp, one);
2392 }
2393 1651
2394 /* Given a fixed-point constant, make a new constant with the new type, 1652 /* Given a fixed-point constant, make a new constant with the new type,
2395 appropriately sign-extended or truncated. */ 1653 appropriately sign-extended or truncated. */
2396 t = force_fit_type_double (type, temp.low, temp.high, -1, 1654 t = force_fit_type_double (type, temp.low, temp.high, -1,
2397 (temp.high < 0 1655 (double_int_negative_p (temp)
2398 && (TYPE_UNSIGNED (type) 1656 && (TYPE_UNSIGNED (type)
2399 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 1657 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2400 | TREE_OVERFLOW (arg1)); 1658 | TREE_OVERFLOW (arg1));
2401 1659
2402 return t; 1660 return t;
3155 2413
3156 int 2414 int
3157 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) 2415 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3158 { 2416 {
3159 /* If either is ERROR_MARK, they aren't equal. */ 2417 /* If either is ERROR_MARK, they aren't equal. */
3160 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK) 2418 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2419 || TREE_TYPE (arg0) == error_mark_node
2420 || TREE_TYPE (arg1) == error_mark_node)
2421 return 0;
2422
2423 /* Similarly, if either does not have a type (like a released SSA name),
2424 they aren't equal. */
2425 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3161 return 0; 2426 return 0;
3162 2427
3163 /* Check equality of integer constants before bailing out due to 2428 /* Check equality of integer constants before bailing out due to
3164 precision differences. */ 2429 precision differences. */
3165 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 2430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5729 4994
5730 return fold_convert (type, 4995 return fold_convert (type,
5731 const_binop (BIT_XOR_EXPR, c, temp, 0)); 4996 const_binop (BIT_XOR_EXPR, c, temp, 0));
5732 } 4997 }
5733 4998
4999 /* For an expression that has the form
5000 (A && B) || ~B
5001 or
5002 (A || B) && ~B,
5003 we can drop one of the inner expressions and simplify to
5004 A || ~B
5005 or
5006 A && ~B.
5007 LOC is the location of the resulting expression. OP is the inner
5008 logical operation; the left-hand side in the examples above, while CMPOP
5009 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5010 removing a condition that guards another, as in
5011 (A != NULL && A->...) || A == NULL
5012 which we must not transform. If RHS_ONLY is true, only eliminate the
5013 right-most operand of the inner logical operation. */
5014
5015 static tree
5016 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5017 bool rhs_only)
5018 {
5019 tree type = TREE_TYPE (cmpop);
5020 enum tree_code code = TREE_CODE (cmpop);
5021 enum tree_code truthop_code = TREE_CODE (op);
5022 tree lhs = TREE_OPERAND (op, 0);
5023 tree rhs = TREE_OPERAND (op, 1);
5024 tree orig_lhs = lhs, orig_rhs = rhs;
5025 enum tree_code rhs_code = TREE_CODE (rhs);
5026 enum tree_code lhs_code = TREE_CODE (lhs);
5027 enum tree_code inv_code;
5028
5029 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5030 return NULL_TREE;
5031
5032 if (TREE_CODE_CLASS (code) != tcc_comparison)
5033 return NULL_TREE;
5034
5035 if (rhs_code == truthop_code)
5036 {
5037 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5038 if (newrhs != NULL_TREE)
5039 {
5040 rhs = newrhs;
5041 rhs_code = TREE_CODE (rhs);
5042 }
5043 }
5044 if (lhs_code == truthop_code && !rhs_only)
5045 {
5046 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5047 if (newlhs != NULL_TREE)
5048 {
5049 lhs = newlhs;
5050 lhs_code = TREE_CODE (lhs);
5051 }
5052 }
5053
5054 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5055 if (inv_code == rhs_code
5056 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5057 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5058 return lhs;
5059 if (!rhs_only && inv_code == lhs_code
5060 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5061 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5062 return rhs;
5063 if (rhs != orig_rhs || lhs != orig_lhs)
5064 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5065 lhs, rhs);
5066 return NULL_TREE;
5067 }
5068
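The identity behind the new transformation can be spot-checked exhaustively over booleans. A minimal sketch covering both the OR form and its AND dual:

#include <stdio.h>

int main (void)
{
  /* Confirm (A && B) || !B == A || !B and (A || B) && !B == A && !B.  */
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        if (((a && b) || !b) != (a || !b))
          printf ("or-form mismatch a=%d b=%d\n", a, b);
        if (((a || b) && !b) != (a && !b))
          printf ("and-form mismatch a=%d b=%d\n", a, b);
      }
  printf ("done\n");   /* no mismatches are printed */
  return 0;
}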
5734 /* Find ways of folding logical expressions of LHS and RHS: 5069 /* Find ways of folding logical expressions of LHS and RHS:
5735 Try to merge two comparisons to the same innermost item. 5070 Try to merge two comparisons to the same innermost item.
5736 Look for range tests like "ch >= '0' && ch <= '9'". 5071 Look for range tests like "ch >= '0' && ch <= '9'".
5737 Look for combinations of simple terms on machines with expensive branches 5072 Look for combinations of simple terms on machines with expensive branches
5738 and evaluate the RHS unconditionally. 5073 and evaluate the RHS unconditionally.
6714 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); 6049 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6715 tree test, true_value, false_value; 6050 tree test, true_value, false_value;
6716 tree lhs = NULL_TREE; 6051 tree lhs = NULL_TREE;
6717 tree rhs = NULL_TREE; 6052 tree rhs = NULL_TREE;
6718 6053
6719 /* This transformation is only worthwhile if we don't have to wrap
6720 arg in a SAVE_EXPR, and the operation can be simplified on at least
6721 one of the branches once it's pushed inside the COND_EXPR. */
6722 if (!TREE_CONSTANT (arg))
6723 return NULL_TREE;
6724
6725 if (TREE_CODE (cond) == COND_EXPR) 6054 if (TREE_CODE (cond) == COND_EXPR)
6726 { 6055 {
6727 test = TREE_OPERAND (cond, 0); 6056 test = TREE_OPERAND (cond, 0);
6728 true_value = TREE_OPERAND (cond, 1); 6057 true_value = TREE_OPERAND (cond, 1);
6729 false_value = TREE_OPERAND (cond, 2); 6058 false_value = TREE_OPERAND (cond, 2);
6741 test = cond; 6070 test = cond;
6742 true_value = constant_boolean_node (true, testtype); 6071 true_value = constant_boolean_node (true, testtype);
6743 false_value = constant_boolean_node (false, testtype); 6072 false_value = constant_boolean_node (false, testtype);
6744 } 6073 }
6745 6074
6075 /* This transformation is only worthwhile if we don't have to wrap ARG
6076 in a SAVE_EXPR and the operation can be simplified on at least one
6077 of the branches once it's pushed inside the COND_EXPR. */
6078 if (!TREE_CONSTANT (arg)
6079 && (TREE_SIDE_EFFECTS (arg)
6080 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6081 return NULL_TREE;
6082
6746 arg = fold_convert_loc (loc, arg_type, arg); 6083 arg = fold_convert_loc (loc, arg_type, arg);
6747 if (lhs == 0) 6084 if (lhs == 0)
6748 { 6085 {
6749 true_value = fold_convert_loc (loc, cond_type, true_value); 6086 true_value = fold_convert_loc (loc, cond_type, true_value);
6750 if (cond_first_p) 6087 if (cond_first_p)
6759 rhs = fold_build2_loc (loc, code, type, false_value, arg); 6096 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6760 else 6097 else
6761 rhs = fold_build2_loc (loc, code, type, arg, false_value); 6098 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6762 } 6099 }
6763 6100
6764 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); 6101 /* Check that we have simplified at least one of the branches. */
6765 return fold_convert_loc (loc, type, test); 6102 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6103 return NULL_TREE;
6104
6105 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6766 } 6106 }
6767 6107
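In source terms, the transformation pushes an operation with an invariant operand into both arms of the conditional, and the new check only keeps the result when at least one arm simplified. A minimal illustration of the shape (the fold happens at compile time, of course, not via printf):

#include <stdio.h>

int main (void)
{
  int x = 1;
  /* (x ? 4 : 8) + 1 folds to x ? 5 : 9: both new arms are constants.  */
  printf ("%d %d\n", (x ? 4 : 8) + 1, x ? 5 : 9);  /* prints 5 5 */
  return 0;
}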
6768 6108
6769 /* Subroutine of fold() that checks for the addition of +/- 0.0. 6109 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6770 6110
8065 native_interpret_int (tree type, const unsigned char *ptr, int len) 7405 native_interpret_int (tree type, const unsigned char *ptr, int len)
8066 { 7406 {
8067 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7407 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8068 int byte, offset, word, words; 7408 int byte, offset, word, words;
8069 unsigned char value; 7409 unsigned char value;
8070 unsigned int HOST_WIDE_INT lo = 0; 7410 double_int result;
8071 HOST_WIDE_INT hi = 0;
8072 7411
8073 if (total_bytes > len) 7412 if (total_bytes > len)
8074 return NULL_TREE; 7413 return NULL_TREE;
8075 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) 7414 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8076 return NULL_TREE; 7415 return NULL_TREE;
7416
7417 result = double_int_zero;
8077 words = total_bytes / UNITS_PER_WORD; 7418 words = total_bytes / UNITS_PER_WORD;
8078 7419
8079 for (byte = 0; byte < total_bytes; byte++) 7420 for (byte = 0; byte < total_bytes; byte++)
8080 { 7421 {
8081 int bitpos = byte * BITS_PER_UNIT; 7422 int bitpos = byte * BITS_PER_UNIT;
8093 else 7434 else
8094 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7435 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8095 value = ptr[offset]; 7436 value = ptr[offset];
8096 7437
8097 if (bitpos < HOST_BITS_PER_WIDE_INT) 7438 if (bitpos < HOST_BITS_PER_WIDE_INT)
8098 lo |= (unsigned HOST_WIDE_INT) value << bitpos; 7439 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
8099 else 7440 else
8100 hi |= (unsigned HOST_WIDE_INT) value 7441 result.high |= (unsigned HOST_WIDE_INT) value
8101 << (bitpos - HOST_BITS_PER_WIDE_INT); 7442 << (bitpos - HOST_BITS_PER_WIDE_INT);
8102 } 7443 }
8103 7444
8104 return build_int_cst_wide_type (type, lo, hi); 7445 return double_int_to_tree (type, result);
8105 } 7446 }
8106 7447
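The byte-reassembly loop is ordinary positional reconstruction; only the offset computation depends on endianness. A minimal sketch of the little-endian case at 32 bits:

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  /* Target bytes of 0x12345678 in little-endian order.  */
  const unsigned char ptr[4] = { 0x78, 0x56, 0x34, 0x12 };
  uint32_t lo = 0;
  for (int byte = 0; byte < 4; byte++)
    lo |= (uint32_t) ptr[byte] << (byte * 8);
  printf ("%x\n", lo);  /* prints 12345678 */
  return 0;
}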
8107 7448
8108 /* Subroutine of native_interpret_expr. Interpret the contents of 7449 /* Subroutine of native_interpret_expr. Interpret the contents of
8109 the buffer PTR of length LEN as a REAL_CST of type TYPE. 7450 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8544 tree base = TREE_OPERAND (op0, 0); 7885 tree base = TREE_OPERAND (op0, 0);
8545 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 7886 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8546 &mode, &unsignedp, &volatilep, false); 7887 &mode, &unsignedp, &volatilep, false);
8547 /* If the reference was to a (constant) zero offset, we can use 7888 /* If the reference was to a (constant) zero offset, we can use
8548 the address of the base if it has the same base type 7889 the address of the base if it has the same base type
8549 as the result type. */ 7890 as the result type and the pointer type is unqualified. */
8550 if (! offset && bitpos == 0 7891 if (! offset && bitpos == 0
8551 && TYPE_MAIN_VARIANT (TREE_TYPE (type)) 7892 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8552 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 7893 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7894 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8553 return fold_convert_loc (loc, type, 7895 return fold_convert_loc (loc, type,
8554 build_fold_addr_expr_loc (loc, base)); 7896 build_fold_addr_expr_loc (loc, base));
8555 } 7897 }
8556 7898
8557 if (TREE_CODE (op0) == MODIFY_EXPR 7899 if (TREE_CODE (op0) == MODIFY_EXPR
8940 break; 8282 break;
8941 } 8283 }
8942 } 8284 }
8943 return NULL_TREE; 8285 return NULL_TREE;
8944 8286
8287 case INDIRECT_REF:
8288 /* Fold *&X to X if X is an lvalue. */
8289 if (TREE_CODE (op0) == ADDR_EXPR)
8290 {
8291 tree op00 = TREE_OPERAND (op0, 0);
8292 if ((TREE_CODE (op00) == VAR_DECL
8293 || TREE_CODE (op00) == PARM_DECL
8294 || TREE_CODE (op00) == RESULT_DECL)
8295 && !TREE_READONLY (op00))
8296 return op00;
8297 }
8298 return NULL_TREE;
8299
8945 default: 8300 default:
8946 return NULL_TREE; 8301 return NULL_TREE;
8947 } /* switch (code) */ 8302 } /* switch (code) */
8948 } 8303 }
8949 8304
9356 { 8711 {
9357 base1 = TREE_OPERAND (arg1, 0); 8712 base1 = TREE_OPERAND (arg1, 0);
9358 offset1 = TREE_OPERAND (arg1, 1); 8713 offset1 = TREE_OPERAND (arg1, 1);
9359 } 8714 }
9360 8715
8716 /* A local variable can never be pointed to by
8717 the default SSA name of an incoming parameter. */
8718 if ((TREE_CODE (arg0) == ADDR_EXPR
8719 && indirect_base0
8720 && TREE_CODE (base0) == VAR_DECL
8721 && auto_var_in_fn_p (base0, current_function_decl)
8722 && !indirect_base1
8723 && TREE_CODE (base1) == SSA_NAME
8724 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8725 && SSA_NAME_IS_DEFAULT_DEF (base1))
8726 || (TREE_CODE (arg1) == ADDR_EXPR
8727 && indirect_base1
8728 && TREE_CODE (base1) == VAR_DECL
8729 && auto_var_in_fn_p (base1, current_function_decl)
8730 && !indirect_base0
8731 && TREE_CODE (base0) == SSA_NAME
8732 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8733 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8734 {
8735 if (code == NE_EXPR)
8736 return constant_boolean_node (1, type);
8737 else if (code == EQ_EXPR)
8738 return constant_boolean_node (0, type);
8739 }
9361 /* If we have equivalent bases we might be able to simplify. */ 8740 /* If we have equivalent bases we might be able to simplify. */
9362 if (indirect_base0 == indirect_base1 8741 else if (indirect_base0 == indirect_base1
9363 && operand_equal_p (base0, base1, 0)) 8742 && operand_equal_p (base0, base1, 0))
9364 { 8743 {
9365 /* We can fold this expression to a constant if the non-constant 8744 /* We can fold this expression to a constant if the non-constant
9366 offset parts are equal. */ 8745 offset parts are equal. */
9367 if ((offset0 == offset1 8746 if ((offset0 == offset1
9368 || (offset0 && offset1 8747 || (offset0 && offset1
9408 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ 8787 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9409 else if (bitpos0 == bitpos1 8788 else if (bitpos0 == bitpos1
9410 && ((code == EQ_EXPR || code == NE_EXPR) 8789 && ((code == EQ_EXPR || code == NE_EXPR)
9411 || POINTER_TYPE_OVERFLOW_UNDEFINED)) 8790 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9412 { 8791 {
9413 tree signed_size_type_node;
9414 signed_size_type_node = signed_type_for (size_type_node);
9415
9416 /* By converting to signed size type we cover middle-end pointer 8792 /* By converting to signed size type we cover middle-end pointer
9417 arithmetic, which operates on unsigned pointer types of size 8793 arithmetic, which operates on unsigned pointer types of size
9418 type size, and ARRAY_REF offsets, which are properly sign- or 8794 type size, and ARRAY_REF offsets, which are properly sign- or
9419 zero-extended from their type in case it is narrower than 8795 zero-extended from their type in case it is narrower than
9420 size type. */ 8796 size type. */
9421 if (offset0 == NULL_TREE) 8797 if (offset0 == NULL_TREE)
9422 offset0 = build_int_cst (signed_size_type_node, 0); 8798 offset0 = build_int_cst (ssizetype, 0);
9423 else 8799 else
9424 offset0 = fold_convert_loc (loc, signed_size_type_node, 8800 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9425 offset0);
9426 if (offset1 == NULL_TREE) 8801 if (offset1 == NULL_TREE)
9427 offset1 = build_int_cst (signed_size_type_node, 0); 8802 offset1 = build_int_cst (ssizetype, 0);
9428 else 8803 else
9429 offset1 = fold_convert_loc (loc, signed_size_type_node, 8804 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9430 offset1);
9431 8805
9432 if (code != EQ_EXPR 8806 if (code != EQ_EXPR
9433 && code != NE_EXPR 8807 && code != NE_EXPR
9434 && (pointer_may_wrap_p (base0, offset0, bitpos0) 8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9435 || pointer_may_wrap_p (base1, offset1, bitpos1))) 8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9551 tree const1 = TREE_OPERAND (arg0, 1); 8925 tree const1 = TREE_OPERAND (arg0, 1);
9552 tree const2 = arg1; /* zero */ 8926 tree const2 = arg1; /* zero */
9553 tree variable1 = TREE_OPERAND (arg0, 0); 8927 tree variable1 = TREE_OPERAND (arg0, 0);
9554 enum tree_code cmp_code = code; 8928 enum tree_code cmp_code = code;
9555 8929
9556 gcc_assert (!integer_zerop (const1)); 8930 /* Handle unfolded multiplication by zero. */
8931 if (integer_zerop (const1))
8932 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9557 8933
9558 fold_overflow_warning (("assuming signed overflow does not occur when " 8934 fold_overflow_warning (("assuming signed overflow does not occur when "
9559 "eliminating multiplication in comparison " 8935 "eliminating multiplication in comparison "
9560 "with zero"), 8936 "with zero"),
9561 WARN_STRICT_OVERFLOW_COMPARISON); 8937 WARN_STRICT_OVERFLOW_COMPARISON);
9600 /* (-a) CMP CST -> a swap(CMP) (-CST) */ 8976 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9601 if (TREE_CODE (arg0) == NEGATE_EXPR) 8977 if (TREE_CODE (arg0) == NEGATE_EXPR)
9602 return fold_build2_loc (loc, swap_tree_comparison (code), type, 8978 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9603 TREE_OPERAND (arg0, 0), 8979 TREE_OPERAND (arg0, 0),
9604 build_real (TREE_TYPE (arg1), 8980 build_real (TREE_TYPE (arg1),
9605 REAL_VALUE_NEGATE (cst))); 8981 real_value_negate (&cst)));
9606 8982
9607 /* IEEE doesn't distinguish +0 and -0 in comparisons. */ 8983 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9608 /* a CMP (-0) -> a CMP 0 */ 8984 /* a CMP (-0) -> a CMP 0 */
9609 if (REAL_VALUE_MINUS_ZERO (cst)) 8985 if (REAL_VALUE_MINUS_ZERO (cst))
9610 return fold_build2_loc (loc, code, type, arg0, 8986 return fold_build2_loc (loc, code, type, arg0,
10595 expressions for the sum of a constant and literal. */ 9971 expressions for the sum of a constant and literal. */
10596 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0); 9972 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10597 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1, 9973 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10598 code == MINUS_EXPR); 9974 code == MINUS_EXPR);
10599 9975
10600 /* With undefined overflow we can only associate constants 9976 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10601 with one variable. */ 9977 if (code == MINUS_EXPR)
10602 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED) 9978 code = PLUS_EXPR;
10603 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) 9979
10604 && var0 && var1) 9980 /* With undefined overflow we can only associate constants with one
9981 variable, and constants whose association doesn't overflow. */
9982 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9983 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10605 { 9984 {
10606 tree tmp0 = var0; 9985 if (var0 && var1)
10607 tree tmp1 = var1; 9986 {
10608 9987 tree tmp0 = var0;
10609 if (TREE_CODE (tmp0) == NEGATE_EXPR) 9988 tree tmp1 = var1;
10610 tmp0 = TREE_OPERAND (tmp0, 0); 9989
10611 if (TREE_CODE (tmp1) == NEGATE_EXPR) 9990 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10612 tmp1 = TREE_OPERAND (tmp1, 0); 9991 tmp0 = TREE_OPERAND (tmp0, 0);
10613 /* The only case we can still associate with two variables 9992 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10614 is if they are the same, modulo negation. */ 9993 tmp1 = TREE_OPERAND (tmp1, 0);
10615 if (!operand_equal_p (tmp0, tmp1, 0)) 9994 /* The only case we can still associate with two variables
10616 ok = false; 9995 is if they are the same, modulo negation. */
9996 if (!operand_equal_p (tmp0, tmp1, 0))
9997 ok = false;
9998 }
9999
10000 if (ok && lit0 && lit1)
10001 {
10002 tree tmp0 = fold_convert (type, lit0);
10003 tree tmp1 = fold_convert (type, lit1);
10004
10005 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10006 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10007 ok = false;
10008 }
10617 } 10009 }
10618 10010
10619 /* Only do something if we found more than two objects. Otherwise, 10011 /* Only do something if we found more than two objects. Otherwise,
10620 nothing has changed and we risk infinite recursion. */ 10012 nothing has changed and we risk infinite recursion. */
10621 if (ok 10013 if (ok
10622 && (2 < ((var0 != 0) + (var1 != 0) 10014 && (2 < ((var0 != 0) + (var1 != 0)
10623 + (con0 != 0) + (con1 != 0) 10015 + (con0 != 0) + (con1 != 0)
10624 + (lit0 != 0) + (lit1 != 0) 10016 + (lit0 != 0) + (lit1 != 0)
10625 + (minus_lit0 != 0) + (minus_lit1 != 0)))) 10017 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10626 { 10018 {
10627 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10628 if (code == MINUS_EXPR)
10629 code = PLUS_EXPR;
10630
10631 var0 = associate_trees (loc, var0, var1, code, type); 10019 var0 = associate_trees (loc, var0, var1, code, type);
10632 con0 = associate_trees (loc, con0, con1, code, type); 10020 con0 = associate_trees (loc, con0, con1, code, type);
10633 lit0 = associate_trees (loc, lit0, lit1, code, type); 10021 lit0 = associate_trees (loc, lit0, lit1, code, type);
10634 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type); 10022 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10635 10023
12189 && TREE_CODE (arg1) == INTEGER_CST 11577 && TREE_CODE (arg1) == INTEGER_CST
12190 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 11578 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12191 && TREE_INT_CST_HIGH (arg1) == -1) 11579 && TREE_INT_CST_HIGH (arg1) == -1)
12192 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11580 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12193 11581
12194 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12195 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12196 strict_overflow_p = false;
12197 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12198 && (TYPE_UNSIGNED (type)
12199 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12200 {
12201 tree c = arg1;
12202 /* Also optimize A % (C << N) where C is a power of 2,
12203 to A & ((C << N) - 1). */
12204 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12205 c = TREE_OPERAND (arg1, 0);
12206
12207 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12208 {
12209 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12210 build_int_cst (TREE_TYPE (arg1), 1));
12211 if (strict_overflow_p)
12212 fold_overflow_warning (("assuming signed overflow does not "
12213 "occur when simplifying "
12214 "X % (power of two)"),
12215 WARN_STRICT_OVERFLOW_MISC);
12216 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12217 fold_convert_loc (loc, type, arg0),
12218 fold_convert_loc (loc, type, mask));
12219 }
12220 }
12221
12222 /* X % -C is the same as X % C. */ 11582 /* X % -C is the same as X % C. */
12223 if (code == TRUNC_MOD_EXPR 11583 if (code == TRUNC_MOD_EXPR
12224 && !TYPE_UNSIGNED (type) 11584 && !TYPE_UNSIGNED (type)
12225 && TREE_CODE (arg1) == INTEGER_CST 11585 && TREE_CODE (arg1) == INTEGER_CST
12226 && !TREE_OVERFLOW (arg1) 11586 && !TREE_OVERFLOW (arg1)
12240 && !TYPE_OVERFLOW_TRAPS (type)) 11600 && !TYPE_OVERFLOW_TRAPS (type))
12241 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0), 11601 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12242 fold_convert_loc (loc, type, 11602 fold_convert_loc (loc, type,
12243 TREE_OPERAND (arg1, 0))); 11603 TREE_OPERAND (arg1, 0)));
12244 11604
11605 strict_overflow_p = false;
12245 if (TREE_CODE (arg1) == INTEGER_CST 11606 if (TREE_CODE (arg1) == INTEGER_CST
12246 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 11607 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12247 &strict_overflow_p))) 11608 &strict_overflow_p)))
12248 { 11609 {
12249 if (strict_overflow_p) 11610 if (strict_overflow_p)
12250 fold_overflow_warning (("assuming signed overflow does not occur " 11611 fold_overflow_warning (("assuming signed overflow does not occur "
12251 "when simplifying modulus"), 11612 "when simplifying modulus"),
12252 WARN_STRICT_OVERFLOW_MISC); 11613 WARN_STRICT_OVERFLOW_MISC);
12253 return fold_convert_loc (loc, type, tem); 11614 return fold_convert_loc (loc, type, tem);
11615 }
11616
11617 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11618 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11619 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11620 && (TYPE_UNSIGNED (type)
11621 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11622 {
11623 tree c = arg1;
11624 /* Also optimize A % (C << N) where C is a power of 2,
11625 to A & ((C << N) - 1). */
11626 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11627 c = TREE_OPERAND (arg1, 0);
11628
11629 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11630 {
11631 tree mask
11632 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11633 build_int_cst (TREE_TYPE (arg1), 1));
11634 if (strict_overflow_p)
11635 fold_overflow_warning (("assuming signed overflow does not "
11636 "occur when simplifying "
11637 "X % (power of two)"),
11638 WARN_STRICT_OVERFLOW_MISC);
11639 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11640 fold_convert_loc (loc, type, arg0),
11641 fold_convert_loc (loc, type, mask));
11642 }
12254 } 11643 }
12255 11644
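For a power-of-two modulus and non-negative X the remainder is just the low bits, which is why the fold above is valid. A minimal check:

#include <stdio.h>

int main (void)
{
  unsigned x = 1234567, c = 64;
  printf ("%u %u\n", x % c, x & (c - 1));  /* both print 7 */
  return 0;
}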
12256 return NULL_TREE; 11645 return NULL_TREE;
12257 11646
12258 case LROTATE_EXPR: 11647 case LROTATE_EXPR:
12511 } 11900 }
12512 11901
12513 /* See if we can build a range comparison. */ 11902 /* See if we can build a range comparison. */
12514 if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) 11903 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12515 return tem; 11904 return tem;
11905
11906 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11907 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11908 {
11909 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11910 if (tem)
11911 return fold_build2_loc (loc, code, type, tem, arg1);
11912 }
11913
11914 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11915 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11916 {
11917 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11918 if (tem)
11919 return fold_build2_loc (loc, code, type, arg0, tem);
11920 }
12516 11921
12517 /* Check for the possibility of merging component references. If our 11922 /* Check for the possibility of merging component references. If our
12518 lhs is another similar operation, try to merge its rhs with our 11923 lhs is another similar operation, try to merge its rhs with our
12519 rhs. Then try to merge our lhs and rhs. */ 11924 rhs. Then try to merge our lhs and rhs. */
12520 if (TREE_CODE (arg0) == code 11925 if (TREE_CODE (arg0) == code
14786 14191
14787 /* .. fall through ... */ 14192 /* .. fall through ... */
14788 14193
14789 case SAVE_EXPR: 14194 case SAVE_EXPR:
14790 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); 14195 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14196
14197 case COND_EXPR:
14198 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14199 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14791 14200
14792 case INTEGER_CST: 14201 case INTEGER_CST:
14793 if (TREE_CODE (bottom) != INTEGER_CST 14202 if (TREE_CODE (bottom) != INTEGER_CST
14794 || integer_zerop (bottom) 14203 || integer_zerop (bottom)
14795 || (TYPE_UNSIGNED (type) 14204 || (TYPE_UNSIGNED (type)
15506 case INTEGER_CST: 14915 case INTEGER_CST:
15507 return !integer_zerop (t); 14916 return !integer_zerop (t);
15508 14917
15509 case ADDR_EXPR: 14918 case ADDR_EXPR:
15510 { 14919 {
15511 tree base = get_base_address (TREE_OPERAND (t, 0)); 14920 tree base = TREE_OPERAND (t, 0);
14921 if (!DECL_P (base))
14922 base = get_base_address (base);
15512 14923
15513 if (!base) 14924 if (!base)
15514 return false; 14925 return false;
15515 14926
15516 /* Weak declarations may link to NULL. Other things may also be NULL 14927 /* Weak declarations may link to NULL. Other things may also be NULL
15517 so protect with -fdelete-null-pointer-checks; but not variables 14928 so protect with -fdelete-null-pointer-checks; but not variables
15518 allocated on the stack. */ 14929 allocated on the stack. */
15519 if (DECL_P (base) 14930 if (DECL_P (base)
15520 && (flag_delete_null_pointer_checks 14931 && (flag_delete_null_pointer_checks
15521 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base)))) 14932 || (DECL_CONTEXT (base)
14933 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14934 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15522 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base); 14935 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15523 14936
15524 /* Constants are never weak. */ 14937 /* Constants are never weak. */
15525 if (CONSTANT_CLASS_P (base)) 14938 if (CONSTANT_CLASS_P (base))
15526 return true; 14939 return true;
15752 && !TYPE_UNSIGNED (type)); 15165 && !TYPE_UNSIGNED (type));
15753 break; 15166 break;
15754 } 15167 }
15755 15168
15756 case REAL_CST: 15169 case REAL_CST:
15757 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0))); 15170 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15758 break; 15171 break;
15759 15172
15760 case FIXED_CST: 15173 case FIXED_CST:
15761 { 15174 {
15762 FIXED_VALUE_TYPE f; 15175 FIXED_VALUE_TYPE f;
15811 } 15224 }
15812 break; 15225 break;
15813 15226
15814 case REAL_CST: 15227 case REAL_CST:
15815 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) 15228 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15816 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0))); 15229 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15817 else 15230 else
15818 t = arg0; 15231 t = arg0;
15819 break; 15232 break;
15820 15233
15821 default: 15234 default: