/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "coretypes.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
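
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   because LT, EQ and GT are independent bits in this encoding, ANDing or
   ORing two compcodes combines the corresponding comparisons directly.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);  /* a<b || a==b  =>  a<=b */
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);  /* a<=b && a>=b =>  a==b */
  assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_NE);  /* a<b || a>b   =>  a!=b */
  return 0;
}
#endif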
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
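
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the same sign-bit trick applied to plain ints and checked on a wrapping
   addition.  Assumes a 32-bit two's complement int.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 0x7fffffff, b = 1;
  int sum = (int) ((unsigned) a + (unsigned) b);  /* wraps to INT_MIN */

  /* A and B share a sign but SUM's sign differs: overflow detected.  */
  assert (OVERFLOW_SUM_SIGN (a, b, sum));
  /* When the signs of A and B differ, overflow is impossible.  */
  assert (! OVERFLOW_SUM_SIGN (a, -1, a - 1));
  return 0;
}
#endif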
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
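
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   a round trip through encode and decode splits a two-word value into four
   half-words and reassembles it unchanged.  */
#if 0
#include <assert.h>

int
main (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, (unsigned HOST_WIDE_INT) 0x12345678, (HOST_WIDE_INT) 0xabc);
  /* Each half-word satisfies word = LOWPART + HIGHPART * BASE.  */
  decode (words, &low, &hi);
  assert (low == 0x12345678 && hi == 0xabc);
  return 0;
}
#endif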
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
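
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the carry out of the low word is exactly the wrap-around test L < L1,
   checked here against native 64-bit arithmetic on 32-bit pieces.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t l1 = 0xffffffffu, l2 = 2, l = l1 + l2;   /* wraps to 1 */
  uint32_t h1 = 5, h2 = 7, h = h1 + h2 + (l < l1);  /* carry is 1 */
  uint64_t wide = (((uint64_t) h1 << 32) | l1) + (((uint64_t) h2 << 32) | l2);

  assert (l == (uint32_t) wide && h == (uint32_t) (wide >> 32));
  return 0;
}
#endif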
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
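
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the same schoolbook digit-by-digit multiply, with 16-bit digits in 32-bit
   accumulators so the result can be checked against a native 64-bit
   product.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0x12345678u, b = 0x9abcdef0u;
  uint32_t ad[2] = { a & 0xffff, a >> 16 };	/* digits, base 2^16 */
  uint32_t bd[2] = { b & 0xffff, b >> 16 };
  uint32_t prod[4] = { 0, 0, 0, 0 };
  int i, j;

  for (i = 0; i < 2; i++)
    {
      uint32_t carry = 0;
      for (j = 0; j < 2; j++)
        {
          /* Digit product plus old digit plus carry never exceeds 32 bits,
             for the same bounds quoted in mul_double above.  */
          carry += ad[i] * bd[j] + prod[i + j];
          prod[i + j] = carry & 0xffff;
          carry >>= 16;
        }
      prod[i + 2] = carry;
    }

  {
    uint64_t got = ((uint64_t) prod[3] << 48) | ((uint64_t) prod[2] << 32)
                   | ((uint64_t) prod[1] << 16) | prod[0];
    assert (got == (uint64_t) a * b);
  }
  return 0;
}
#endif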
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
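
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   why the code above shifts by (bits - count - 1) and then by 1: when
   count == 0 a single shift by the full word width would be undefined in C,
   while the two-step shift stays in range for every count.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
high_bits_shifted_in (uint32_t low, int count)  /* 0 <= count < 32 */
{
  /* Never shifts by more than 31, even when count == 0.  */
  return low >> (32 - count - 1) >> 1;
}

int
main (void)
{
  assert (high_bits_shifted_in (0xffffffffu, 0) == 0);
  assert (high_bits_shifted_in (0x80000000u, 1) == 1);
  return 0;
}
#endif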
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
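
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   a rotate is the OR of a shift and the complementary opposite shift, the
   same composition the two functions above use, here on one 32-bit word.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
rotl32 (uint32_t x, unsigned int count)  /* 0 < count < 32 */
{
  return (x << count) | (x >> (32 - count));
}

int
main (void)
{
  assert (rotl32 (0x80000001u, 4) == 0x00000018u);
  return 0;
}
#endif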
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the (I - 1)st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
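
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the four rounding styles handled above, spelled out on native ints.
   C's `/' truncates toward zero; floor, ceiling and round-to-nearest are
   derived by adjusting the quotient when a remainder is left.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int num = -7, den = 2;
  int quo = num / den, rem = num % den;  /* trunc: quo == -3, rem == -1 */

  int floor_quo = quo - (rem != 0 && (rem < 0) != (den < 0));  /* -4 */
  int ceil_quo = quo + (rem != 0 && (rem < 0) == (den < 0));   /* -3 */
  /* Round to closest, ties away from zero, as ROUND_DIV_EXPR does.  */
  int round_quo = quo + (2 * abs (rem) >= abs (den)
                         ? ((num < 0) != (den < 0) ? -1 : 1)
                         : 0);                                 /* -4 */

  assert (quo == -3 && floor_quo == -4 && ceil_quo == -3 && round_quo == -4);
  return 0;
}
#endif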
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
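
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the textbook identity computed by the nest of const_binop calls above,
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d), on
   doubles.  */
#if 0
#include <assert.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 3.0, d = 4.0;
  double magsq = c * c + d * d;          /* 25 */
  double re = (a * c + b * d) / magsq;   /* 11 / 25 */
  double im = (b * c - a * d) / magsq;   /*  2 / 25 */

  assert (re == 0.44 && im == 0.08);
  return 0;
}
#endif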
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = t;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
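
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the same hash-consing pattern with a toy open-addressing table in place
   of htab_t, so equal (value, kind) pairs always yield the
   pointer-identical node.  */
#if 0
#include <assert.h>
#include <stdlib.h>

struct node { long value; int kind; };
static struct node *table[64];  /* toy fixed-size table */

static struct node *
intern (long value, int kind)
{
  unsigned int i = ((unsigned long) value ^ (unsigned long) kind) & 63;

  while (table[i] != NULL)
    {
      if (table[i]->value == value && table[i]->kind == kind)
        return table[i];        /* cache hit: share the existing node */
      i = (i + 1) & 63;
    }
  table[i] = malloc (sizeof (struct node));
  table[i]->value = value;
  table[i]->kind = kind;
  return table[i];
}

int
main (void)
{
  assert (intern (42, 0) == intern (42, 0));  /* shared */
  assert (intern (42, 0) != intern (42, 1));  /* distinct kind */
  return 0;
}
#endif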
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          int overflow = 0;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
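
/* Illustrative sketch, not part of GCC (kept out of the build with #if 0):
   the Java-style saturating conversion described above, spelled out for
   host doubles and a 32-bit int: NaN maps to zero, out-of-range values
   clamp to INT_MIN / INT_MAX, in-range values truncate.  */
#if 0
#include <assert.h>
#include <limits.h>
#include <math.h>

static int
saturating_dtoi (double r)
{
  if (isnan (r))
    return 0;
  if (r <= (double) INT_MIN)
    return INT_MIN;
  if (r >= (double) INT_MAX)
    return INT_MAX;
  return (int) r;
}

int
main (void)
{
  assert (saturating_dtoi (1e99) == INT_MAX);
  assert (saturating_dtoi (-1e99) == INT_MIN);
  assert (saturating_dtoi (nan ("")) == 0);
  assert (saturating_dtoi (-3.9) == -3);
  return 0;
}
#endif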
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
2135 /* Return nonzero if two operands (typically of the same tree node)
2136 are necessarily equal. If either argument has side-effects this
2137 function returns zero.
2139 If ONLY_CONST is nonzero, only return nonzero for constants.
2140 This function tests whether the operands are indistinguishable;
2141 it does not test whether they are equal using C's == operation.
2142 The distinction is important for IEEE floating point, because
2143 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2144 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2146 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2147 even though it may hold multiple values during a function.
2148 This is because a GCC tree node guarantees that nothing else is
2149 executed between the evaluation of its "operands" (which may often
2150 be evaluated in arbitrary order). Hence if the operands themselves
2151 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2152 same value in each operand/subexpression. Hence a zero value for
2153 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2154 If comparing arbitrary expression trees, such as from different
2155 statements, ONLY_CONST must usually be nonzero. */
2158 operand_equal_p (tree arg0, tree arg1, int only_const)
2162 /* If either is ERROR_MARK, they aren't equal. */
2163 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2166 /* If both types don't have the same signedness, then we can't consider
2167 them equal. We must check this before the STRIP_NOPS calls
2168 because they may change the signedness of the arguments. */
2169 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2175 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2176 /* This is needed for conversions and for COMPONENT_REF.
2177 Might as well play it safe and always test this. */
2178 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2179 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2180 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2183 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2184 We don't care about side effects in that case because the SAVE_EXPR
2185 takes care of that for us. In all other cases, two expressions are
2186 equal if they have no side effects. If we have two identical
2187 expressions with side effects that should be treated the same due
2188 to the only side effects being identical SAVE_EXPR's, that will
2189 be detected in the recursive calls below. */
2190 if (arg0 == arg1 && ! only_const
2191 && (TREE_CODE (arg0) == SAVE_EXPR
2192 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2195 /* Next handle constant cases, those for which we can return 1 even
2196 if ONLY_CONST is set. */
2197 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2198 switch (TREE_CODE (arg0))
2201 return (! TREE_CONSTANT_OVERFLOW (arg0)
2202 && ! TREE_CONSTANT_OVERFLOW (arg1)
2203 && tree_int_cst_equal (arg0, arg1));
2206 return (! TREE_CONSTANT_OVERFLOW (arg0)
2207 && ! TREE_CONSTANT_OVERFLOW (arg1)
2208 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2209 TREE_REAL_CST (arg1)));
2215 if (TREE_CONSTANT_OVERFLOW (arg0)
2216 || TREE_CONSTANT_OVERFLOW (arg1))
2219 v1 = TREE_VECTOR_CST_ELTS (arg0);
2220 v2 = TREE_VECTOR_CST_ELTS (arg1);
2223 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2226 v1 = TREE_CHAIN (v1);
2227 v2 = TREE_CHAIN (v2);
2234 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2236 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2240 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2241 && ! memcmp (TREE_STRING_POINTER (arg0),
2242 TREE_STRING_POINTER (arg1),
2243 TREE_STRING_LENGTH (arg0)));
2246 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2255 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2258 /* Two conversions are equal only if signedness and modes match. */
2259 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2260 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2261 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2264 return operand_equal_p (TREE_OPERAND (arg0, 0),
2265 TREE_OPERAND (arg1, 0), 0);
2269 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2270 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2274 /* For commutative ops, allow the other order. */
2275 return (commutative_tree_code (TREE_CODE (arg0))
2276 && operand_equal_p (TREE_OPERAND (arg0, 0),
2277 TREE_OPERAND (arg1, 1), 0)
2278 && operand_equal_p (TREE_OPERAND (arg0, 1),
2279 TREE_OPERAND (arg1, 0), 0));
2282 /* If either of the pointer (or reference) expressions we are
2283 dereferencing contains a side effect, these cannot be equal. */
2284 if (TREE_SIDE_EFFECTS (arg0)
2285 || TREE_SIDE_EFFECTS (arg1))
2288 switch (TREE_CODE (arg0))
2291 return operand_equal_p (TREE_OPERAND (arg0, 0),
2292 TREE_OPERAND (arg1, 0), 0);
2296 case ARRAY_RANGE_REF:
2297 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2298 TREE_OPERAND (arg1, 0), 0)
2299 && operand_equal_p (TREE_OPERAND (arg0, 1),
2300 TREE_OPERAND (arg1, 1), 0));
2303 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2304 TREE_OPERAND (arg1, 0), 0)
2305 && operand_equal_p (TREE_OPERAND (arg0, 1),
2306 TREE_OPERAND (arg1, 1), 0)
2307 && operand_equal_p (TREE_OPERAND (arg0, 2),
2308 TREE_OPERAND (arg1, 2), 0));
2314 switch (TREE_CODE (arg0))
2317 case TRUTH_NOT_EXPR:
2318 return operand_equal_p (TREE_OPERAND (arg0, 0),
2319 TREE_OPERAND (arg1, 0), 0);
2322 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2325 /* If the CALL_EXPRs call different functions, then they
2326 clearly cannot be equal. */
2327 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2328 TREE_OPERAND (arg1, 0), 0))
2331 /* Only consider const functions equivalent. */
2332 fndecl = get_callee_fndecl (arg0);
2333 if (fndecl == NULL_TREE
2334 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2337 /* Now see if all the arguments are the same. operand_equal_p
2338 does not handle TREE_LIST, so we walk the operands here
2339 feeding them to operand_equal_p. */
2340 arg0 = TREE_OPERAND (arg0, 1);
2341 arg1 = TREE_OPERAND (arg1, 1);
2342 while (arg0 && arg1)
2344 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2347 arg0 = TREE_CHAIN (arg0);
2348 arg1 = TREE_CHAIN (arg1);
2351 /* If we get here and both argument lists are exhausted
2352 then the CALL_EXPRs are equal. */
2353 return ! (arg0 || arg1);
2360 /* Consider __builtin_sqrt equal to sqrt. */
2361 return TREE_CODE (arg0) == FUNCTION_DECL
2362 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2363 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2364 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
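/* Illustrative sketch, not part of the original file: how a folder might
   use operand_equal_p with ONLY_CONST zero to collapse "x - x" to zero.
   The helper name fold_self_subtraction is hypothetical.  A zero
   ONLY_CONST is safe here only because both operands come from the same
   expression, per the comment above operand_equal_p.  */
static tree
fold_self_subtraction (tree type, tree arg0, tree arg1)
{
  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (arg0, arg1, 0)
      && ! TREE_SIDE_EFFECTS (arg0))
    return fold_convert (type, integer_zero_node);
  return NULL_TREE;
}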
2371 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2372 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2374 When in doubt, return 0. */
2377 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2379 int unsignedp1, unsignedpo;
2380 tree primarg0, primarg1, primother;
2381 unsigned int correct_width;
2383 if (operand_equal_p (arg0, arg1, 0))
2386 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2387 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2390 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2391 and see if the inner values are the same. This removes any
2392 signedness comparison, which doesn't matter here. */
2393 primarg0 = arg0, primarg1 = arg1;
2394 STRIP_NOPS (primarg0);
2395 STRIP_NOPS (primarg1);
2396 if (operand_equal_p (primarg0, primarg1, 0))
2399 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2400 actual comparison operand, ARG0.
2402 First throw away any conversions to wider types
2403 already present in the operands. */
2405 primarg1 = get_narrower (arg1, &unsignedp1);
2406 primother = get_narrower (other, &unsignedpo);
2408 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2409 if (unsignedp1 == unsignedpo
2410 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2411 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2413 tree type = TREE_TYPE (arg0);
2415 /* Make sure shorter operand is extended the right way
2416 to match the longer operand. */
2417 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2418 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2420 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
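/* Standalone illustration (compile separately), assuming nothing from
   this file: the shorten_compare pattern matched above is valid because
   a comparison done in a widened type agrees with one done in the
   narrow type whenever the constant fits the narrow type.  */
#include <assert.h>
int
main (void)
{
  short s;
  for (s = -100; s <= 100; s++)
    assert (((int) s == 37) == (s == (short) 37));
  return 0;
}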
2427 /* See if ARG is an expression that is either a comparison or is performing
2428 arithmetic on comparisons. The comparisons must only be comparing
2429 two different values, which will be stored in *CVAL1 and *CVAL2; if
2430 they are nonzero it means that some operands have already been found.
2431 No variables may be used anywhere else in the expression except in the
2432 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2433 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2435 If this is true, return 1. Otherwise, return zero. */
2438 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2440 enum tree_code code = TREE_CODE (arg);
2441 char class = TREE_CODE_CLASS (code);
2443 /* We can handle some of the 'e' cases here. */
2444 if (class == 'e' && code == TRUTH_NOT_EXPR)
2446 else if (class == 'e'
2447 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2448 || code == COMPOUND_EXPR))
2451 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2452 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2454 /* If we've already found a CVAL1 or CVAL2, this expression is
2455 too complex to handle. */
2456 if (*cval1 || *cval2)
2466 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2469 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2470 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2471 cval1, cval2, save_p));
2477 if (code == COND_EXPR)
2478 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2479 cval1, cval2, save_p)
2480 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2481 cval1, cval2, save_p)
2482 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2483 cval1, cval2, save_p));
2487 /* First see if we can handle the first operand, then the second. For
2488 the second operand, we know *CVAL1 can't be zero. It must be that
2489 one side of the comparison is each of the values; test for the
2490 case where this isn't true by failing if the two operands are the same. */
2493 if (operand_equal_p (TREE_OPERAND (arg, 0),
2494 TREE_OPERAND (arg, 1), 0))
2498 *cval1 = TREE_OPERAND (arg, 0);
2499 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2501 else if (*cval2 == 0)
2502 *cval2 = TREE_OPERAND (arg, 0);
2503 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2508 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2510 else if (*cval2 == 0)
2511 *cval2 = TREE_OPERAND (arg, 1);
2512 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2524 /* ARG is a tree that is known to contain just arithmetic operations and
2525 comparisons. Evaluate the operations in the tree substituting NEW0 for
2526 any occurrence of OLD0 as an operand of a comparison and likewise for
2530 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2532 tree type = TREE_TYPE (arg);
2533 enum tree_code code = TREE_CODE (arg);
2534 char class = TREE_CODE_CLASS (code);
2536 /* We can handle some of the 'e' cases here. */
2537 if (class == 'e' && code == TRUTH_NOT_EXPR)
2539 else if (class == 'e'
2540 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2546 return fold (build1 (code, type,
2547 eval_subst (TREE_OPERAND (arg, 0),
2548 old0, new0, old1, new1)));
2551 return fold (build (code, type,
2552 eval_subst (TREE_OPERAND (arg, 0),
2553 old0, new0, old1, new1),
2554 eval_subst (TREE_OPERAND (arg, 1),
2555 old0, new0, old1, new1)));
2561 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2564 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2567 return fold (build (code, type,
2568 eval_subst (TREE_OPERAND (arg, 0),
2569 old0, new0, old1, new1),
2570 eval_subst (TREE_OPERAND (arg, 1),
2571 old0, new0, old1, new1),
2572 eval_subst (TREE_OPERAND (arg, 2),
2573 old0, new0, old1, new1)));
2577 /* Fall through - ??? */
2581 tree arg0 = TREE_OPERAND (arg, 0);
2582 tree arg1 = TREE_OPERAND (arg, 1);
2584 /* We need to check both for exact equality and tree equality. The
2585 former will be true if the operand has a side-effect. In that
2586 case, we know the operand occurred exactly once. */
2588 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2590 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2593 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2595 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2598 return fold (build (code, type, arg0, arg1));
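/* Standalone illustration (compile separately): an expression built only
   from comparisons of the same two values can be folded by substituting
   the outcome of each possible ordering, which is the service
   twoval_comparison_p and eval_subst provide.  Here (x < y) && (x == y)
   is 0 for all three orderings.  */
#include <assert.h>
int
main (void)
{
  int x, y = 1;
  for (x = 0; x <= 2; x++)      /* covers x < y, x == y and x > y */
    assert (((x < y) && (x == y)) == 0);
  return 0;
}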
2606 /* Return a tree for the case when the result of an expression is RESULT
2607 converted to TYPE and OMITTED was previously an operand of the expression
2608 but is now not needed (e.g., we folded OMITTED * 0).
2610 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2611 the conversion of RESULT to TYPE. */
2614 omit_one_operand (tree type, tree result, tree omitted)
2616 tree t = fold_convert (type, result);
2618 if (TREE_SIDE_EFFECTS (omitted))
2619 return build (COMPOUND_EXPR, type, omitted, t);
2621 return non_lvalue (t);
2624 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2627 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2629 tree t = fold_convert (type, result);
2631 if (TREE_SIDE_EFFECTS (omitted))
2632 return build (COMPOUND_EXPR, type, omitted, t);
2634 return pedantic_non_lvalue (t);
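/* Standalone illustration (compile separately): folding "f () * 0" must
   still evaluate f () when it has side effects, which is why the two
   functions above build (f (), 0) rather than plain 0.  */
#include <assert.h>
static int calls;
static int f (void) { calls++; return 42; }
int
main (void)
{
  int r = (f (), 0);            /* shape of the COMPOUND_EXPR built above */
  assert (r == 0 && calls == 1);
  return 0;
}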
2637 /* Return a simplified tree node for the truth-negation of ARG. This
2638 never alters ARG itself. We assume that ARG is an operation that
2639 returns a truth value (0 or 1). */
2642 invert_truthvalue (tree arg)
2644 tree type = TREE_TYPE (arg);
2645 enum tree_code code = TREE_CODE (arg);
2647 if (code == ERROR_MARK)
2650 /* If this is a comparison, we can simply invert it, except for
2651 floating-point non-equality comparisons, in which case we just
2652 enclose a TRUTH_NOT_EXPR around what we have. */
2654 if (TREE_CODE_CLASS (code) == '<')
2656 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2657 && !flag_unsafe_math_optimizations
2660 return build1 (TRUTH_NOT_EXPR, type, arg);
2661 else if (code == UNORDERED_EXPR
2662 || code == ORDERED_EXPR
2663 || code == UNEQ_EXPR
2664 || code == UNLT_EXPR
2665 || code == UNLE_EXPR
2666 || code == UNGT_EXPR
2667 || code == UNGE_EXPR)
2668 return build1 (TRUTH_NOT_EXPR, type, arg);
2670 return build (invert_tree_comparison (code), type,
2671 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2677 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2679 case TRUTH_AND_EXPR:
2680 return build (TRUTH_OR_EXPR, type,
2681 invert_truthvalue (TREE_OPERAND (arg, 0)),
2682 invert_truthvalue (TREE_OPERAND (arg, 1)));
2685 return build (TRUTH_AND_EXPR, type,
2686 invert_truthvalue (TREE_OPERAND (arg, 0)),
2687 invert_truthvalue (TREE_OPERAND (arg, 1)));
2689 case TRUTH_XOR_EXPR:
2690 /* Here we can invert either operand. We invert the first operand
2691 unless the second operand is a TRUTH_NOT_EXPR in which case our
2692 result is the XOR of the first operand with the inside of the
2693 negation of the second operand. */
2695 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2696 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2697 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2699 return build (TRUTH_XOR_EXPR, type,
2700 invert_truthvalue (TREE_OPERAND (arg, 0)),
2701 TREE_OPERAND (arg, 1));
2703 case TRUTH_ANDIF_EXPR:
2704 return build (TRUTH_ORIF_EXPR, type,
2705 invert_truthvalue (TREE_OPERAND (arg, 0)),
2706 invert_truthvalue (TREE_OPERAND (arg, 1)));
2708 case TRUTH_ORIF_EXPR:
2709 return build (TRUTH_ANDIF_EXPR, type,
2710 invert_truthvalue (TREE_OPERAND (arg, 0)),
2711 invert_truthvalue (TREE_OPERAND (arg, 1)));
2713 case TRUTH_NOT_EXPR:
2714 return TREE_OPERAND (arg, 0);
2717 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2718 invert_truthvalue (TREE_OPERAND (arg, 1)),
2719 invert_truthvalue (TREE_OPERAND (arg, 2)));
2722 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2723 invert_truthvalue (TREE_OPERAND (arg, 1)));
2725 case NON_LVALUE_EXPR:
2726 return invert_truthvalue (TREE_OPERAND (arg, 0));
2731 return build1 (TREE_CODE (arg), type,
2732 invert_truthvalue (TREE_OPERAND (arg, 0)));
2735 if (!integer_onep (TREE_OPERAND (arg, 1)))
2737 return build (EQ_EXPR, type, arg,
2738 fold_convert (type, integer_zero_node));
2741 return build1 (TRUTH_NOT_EXPR, type, arg);
2743 case CLEANUP_POINT_EXPR:
2744 return build1 (CLEANUP_POINT_EXPR, type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)));
2750 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2752 return build1 (TRUTH_NOT_EXPR, type, arg);
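/* Standalone illustration (compile separately) of two of the inversions
   above: TRUTH_AND_EXPR inverts by De Morgan into TRUTH_OR_EXPR of the
   inverted operands, and LT_EXPR inverts to GE_EXPR for integers.  */
#include <assert.h>
int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      assert (!(a && b) == (!a || !b));
  for (a = -2; a <= 2; a++)
    assert (!(a < 0) == (a >= 0));
  return 0;
}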
2755 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2756 operands are another bit-wise operation with a common input. If so,
2757 distribute the bit operations to save an operation and possibly two if
2758 constants are involved. For example, convert
2759 (A | B) & (A | C) into A | (B & C)
2760 Further simplification will occur if B and C are constants.
2762 If this optimization cannot be done, 0 will be returned. */
2765 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2770 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2771 || TREE_CODE (arg0) == code
2772 || (TREE_CODE (arg0) != BIT_AND_EXPR
2773 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2776 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2778 common = TREE_OPERAND (arg0, 0);
2779 left = TREE_OPERAND (arg0, 1);
2780 right = TREE_OPERAND (arg1, 1);
2782 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2784 common = TREE_OPERAND (arg0, 0);
2785 left = TREE_OPERAND (arg0, 1);
2786 right = TREE_OPERAND (arg1, 0);
2788 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2790 common = TREE_OPERAND (arg0, 1);
2791 left = TREE_OPERAND (arg0, 0);
2792 right = TREE_OPERAND (arg1, 1);
2794 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2796 common = TREE_OPERAND (arg0, 1);
2797 left = TREE_OPERAND (arg0, 0);
2798 right = TREE_OPERAND (arg1, 0);
2803 return fold (build (TREE_CODE (arg0), type, common,
2804 fold (build (code, type, left, right))));
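/* Standalone check (compile separately) of the distributive identity the
   function above relies on: (A | B) & (A | C) == A | (B & C), and dually
   with & and | exchanged.  */
#include <assert.h>
int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}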
2807 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2808 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2811 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2814 tree result = build (BIT_FIELD_REF, type, inner,
2815 size_int (bitsize), bitsize_int (bitpos));
2817 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
2822 /* Optimize a bit-field compare.
2824 There are two cases: First is a compare against a constant and the
2825 second is a comparison of two items where the fields are at the same
2826 bit position relative to the start of a chunk (byte, halfword, word)
2827 large enough to contain it. In these cases we can avoid the shift
2828 implicit in bitfield extractions.
2830 For constants, we emit a compare of the shifted constant with the
2831 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2832 compared. For two fields at the same position, we do the ANDs with the
2833 similar mask and compare the result of the ANDs.
2835 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2836 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2837 are the left and right operands of the comparison, respectively.
2839 If the optimization described above can be done, we return the resulting
2840 tree. Otherwise we return zero. */
2843 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2846 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2847 tree type = TREE_TYPE (lhs);
2848 tree signed_type, unsigned_type;
2849 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2850 enum machine_mode lmode, rmode, nmode;
2851 int lunsignedp, runsignedp;
2852 int lvolatilep = 0, rvolatilep = 0;
2853 tree linner, rinner = NULL_TREE;
2857 /* Get all the information about the extractions being done. If the bit size
2858 is the same as the size of the underlying object, we aren't doing an
2859 extraction at all and so can do nothing. We also don't want to
2860 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2861 then will no longer be able to replace it. */
2862 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2863 &lunsignedp, &lvolatilep);
2864 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2865 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2870 /* If this is not a constant, we can only do something if bit positions,
2871 sizes, and signedness are the same. */
2872 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2873 &runsignedp, &rvolatilep);
2875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2876 || lunsignedp != runsignedp || offset != 0
2877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2881 /* See if we can find a mode to refer to this field. We should be able to,
2882 but fail if we can't. */
2883 nmode = get_best_mode (lbitsize, lbitpos,
2884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2886 TYPE_ALIGN (TREE_TYPE (rinner))),
2887 word_mode, lvolatilep || rvolatilep);
2888 if (nmode == VOIDmode)
2891 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
2893 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2894 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2896 /* Compute the bit position and size for the new reference and our offset
2897 within it. If the new reference is the same size as the original, we
2898 won't optimize anything, so return zero. */
2899 nbitsize = GET_MODE_BITSIZE (nmode);
2900 nbitpos = lbitpos & ~ (nbitsize - 1);
2902 if (nbitsize == lbitsize)
2905 if (BYTES_BIG_ENDIAN)
2906 lbitpos = nbitsize - lbitsize - lbitpos;
2908 /* Make the mask to be used against the extracted field. */
2909 mask = build_int_2 (~0, ~0);
2910 TREE_TYPE (mask) = unsigned_type;
2911 force_fit_type (mask, 0);
2912 mask = fold_convert (unsigned_type, mask);
2913 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2914 mask = const_binop (RSHIFT_EXPR, mask,
2915 size_int (nbitsize - lbitsize - lbitpos), 0);
2918 /* If not comparing with constant, just rework the comparison and return. */
2920 return build (code, compare_type,
2921 build (BIT_AND_EXPR, unsigned_type,
2922 make_bit_field_ref (linner, unsigned_type,
2923 nbitsize, nbitpos, 1),
2925 build (BIT_AND_EXPR, unsigned_type,
2926 make_bit_field_ref (rinner, unsigned_type,
2927 nbitsize, nbitpos, 1),
2930 /* Otherwise, we are handling the constant case. See if the constant is too
2931 big for the field. Warn and return a tree for 0 (false) if so. We do
2932 this not only for its own sake, but to avoid having to test for this
2933 error case below. If we didn't, we might generate wrong code.
2935 For unsigned fields, the constant shifted right by the field length should
2936 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2941 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2942 fold_convert (unsigned_type, rhs),
2943 size_int (lbitsize), 0)))
2945 warning ("comparison is always %d due to width of bit-field",
2947 return fold_convert (compare_type,
2949 ? integer_one_node : integer_zero_node));
2954 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2955 size_int (lbitsize - 1), 0);
2956 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2958 warning ("comparison is always %d due to width of bit-field",
2960 return fold_convert (compare_type,
2962 ? integer_one_node : integer_zero_node));
2966 /* Single-bit compares should always be against zero. */
2967 if (lbitsize == 1 && ! integer_zerop (rhs))
2969 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2970 rhs = fold_convert (type, integer_zero_node);
2973 /* Make a new bitfield reference, shift the constant over the
2974 appropriate number of bits and mask it with the computed mask
2975 (in case this was a signed field). If we changed it, make a new one. */
2976 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2979 TREE_SIDE_EFFECTS (lhs) = 1;
2980 TREE_THIS_VOLATILE (lhs) = 1;
2983 rhs = fold (const_binop (BIT_AND_EXPR,
2984 const_binop (LSHIFT_EXPR,
2985 fold_convert (unsigned_type, rhs),
2986 size_int (lbitpos), 0),
2989 return build (code, compare_type,
2990 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
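/* Standalone illustration (compile separately) of the rewrite above: a
   bit-field compare against a constant becomes a masked compare of the
   containing word, avoiding the extract-and-shift.  The field layout
   here (3 bits at bit 4) is invented for the example; the real code
   derives it from get_inner_reference.  */
#include <assert.h>
int
main (void)
{
  unsigned bitpos = 4, bitsize = 3;
  unsigned mask = ((1u << bitsize) - 1) << bitpos;
  unsigned word = 0x5u << bitpos;               /* field holds 5 */
  assert (((word >> bitpos) & 7u) == 5u);       /* extract-and-compare */
  assert ((word & mask) == (5u << bitpos));     /* masked compare */
  return 0;
}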
2994 /* Subroutine for fold_truthop: decode a field reference.
2996 If EXP is a comparison reference, we return the innermost reference.
2998 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2999 set to the starting bit number.
3001 If the innermost field can be completely contained in a mode-sized
3002 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3004 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3005 otherwise it is not changed.
3007 *PUNSIGNEDP is set to the signedness of the field.
3009 *PMASK is set to the mask used. This is either contained in a
3010 BIT_AND_EXPR or derived from the width of the field.
3012 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3014 Return 0 if this is not a component reference or is one that we can't
3015 do anything with. */
3018 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3019 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3020 int *punsignedp, int *pvolatilep,
3021 tree *pmask, tree *pand_mask)
3023 tree outer_type = 0;
3025 tree mask, inner, offset;
3027 unsigned int precision;
3029 /* All the optimizations using this function assume integer fields.
3030 There are problems with FP fields since the type_for_size call
3031 below can fail for, e.g., XFmode. */
3032 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3035 /* We are interested in the bare arrangement of bits, so strip everything
3036 that doesn't affect the machine mode. However, record the type of the
3037 outermost expression if it may matter below. */
3038 if (TREE_CODE (exp) == NOP_EXPR
3039 || TREE_CODE (exp) == CONVERT_EXPR
3040 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3041 outer_type = TREE_TYPE (exp);
3044 if (TREE_CODE (exp) == BIT_AND_EXPR)
3046 and_mask = TREE_OPERAND (exp, 1);
3047 exp = TREE_OPERAND (exp, 0);
3048 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3049 if (TREE_CODE (and_mask) != INTEGER_CST)
3053 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3054 punsignedp, pvolatilep);
3055 if ((inner == exp && and_mask == 0)
3056 || *pbitsize < 0 || offset != 0
3057 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3060 /* If the number of bits in the reference is the same as the bitsize of
3061 the outer type, then the outer type gives the signedness. Otherwise
3062 (in case of a small bitfield) the signedness is unchanged. */
3063 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3064 *punsignedp = TYPE_UNSIGNED (outer_type);
3066 /* Compute the mask to access the bitfield. */
3067 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3068 precision = TYPE_PRECISION (unsigned_type);
3070 mask = build_int_2 (~0, ~0);
3071 TREE_TYPE (mask) = unsigned_type;
3072 force_fit_type (mask, 0);
3073 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3074 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3076 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3078 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3079 fold_convert (unsigned_type, and_mask), mask));
3082 *pand_mask = and_mask;
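/* Standalone illustration (compile separately) of the mask construction
   used above: shifting all-ones left and then right by
   PRECISION - BITSIZE leaves exactly BITSIZE low-order one bits when
   the arithmetic is unsigned.  */
#include <assert.h>
int
main (void)
{
  unsigned precision = 32, bitsize = 5;
  unsigned mask = (~0u << (precision - bitsize)) >> (precision - bitsize);
  assert (mask == 0x1fu);
  return 0;
}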
3086 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3090 all_ones_mask_p (tree mask, int size)
3092 tree type = TREE_TYPE (mask);
3093 unsigned int precision = TYPE_PRECISION (type);
3096 tmask = build_int_2 (~0, ~0);
3097 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3098 force_fit_type (tmask, 0);
3100 tree_int_cst_equal (mask,
3101 const_binop (RSHIFT_EXPR,
3102 const_binop (LSHIFT_EXPR, tmask,
3103 size_int (precision - size),
3105 size_int (precision - size), 0));
3108 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3109 represents the sign bit of EXP's type. If EXP represents a sign
3110 or zero extension, also test VAL against the unextended type.
3111 The return value is the (sub)expression whose sign bit is VAL,
3112 or NULL_TREE otherwise. */
3115 sign_bit_p (tree exp, tree val)
3117 unsigned HOST_WIDE_INT mask_lo, lo;
3118 HOST_WIDE_INT mask_hi, hi;
3122 /* Tree EXP must have an integral type. */
3123 t = TREE_TYPE (exp);
3124 if (! INTEGRAL_TYPE_P (t))
3127 /* Tree VAL must be an integer constant. */
3128 if (TREE_CODE (val) != INTEGER_CST
3129 || TREE_CONSTANT_OVERFLOW (val))
3132 width = TYPE_PRECISION (t);
3133 if (width > HOST_BITS_PER_WIDE_INT)
3135 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3138 mask_hi = ((unsigned HOST_WIDE_INT) -1
3139 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3145 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3148 mask_lo = ((unsigned HOST_WIDE_INT) -1
3149 >> (HOST_BITS_PER_WIDE_INT - width));
3152 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3153 treat VAL as if it were unsigned. */
3154 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3155 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3158 /* Handle extension from a narrower type. */
3159 if (TREE_CODE (exp) == NOP_EXPR
3160 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3161 return sign_bit_p (TREE_OPERAND (exp, 0), val);
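/* Standalone illustration (compile separately): the sign-bit constant
   recognized above is 1 << (width - 1), and ANDing with it isolates the
   sign of a value reinterpreted as unsigned.  */
#include <assert.h>
#include <limits.h>
int
main (void)
{
  unsigned width = sizeof (int) * CHAR_BIT;
  unsigned sign = 1u << (width - 1);
  assert (((unsigned) -5 & sign) != 0);
  assert (((unsigned) 5 & sign) == 0);
  return 0;
}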
3166 /* Subroutine for fold_truthop: determine if an operand is simple enough
3167 to be evaluated unconditionally. */
3170 simple_operand_p (tree exp)
3172 /* Strip any conversions that don't change the machine mode. */
3173 while ((TREE_CODE (exp) == NOP_EXPR
3174 || TREE_CODE (exp) == CONVERT_EXPR)
3175 && (TYPE_MODE (TREE_TYPE (exp))
3176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3177 exp = TREE_OPERAND (exp, 0);
3179 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3181 && ! TREE_ADDRESSABLE (exp)
3182 && ! TREE_THIS_VOLATILE (exp)
3183 && ! DECL_NONLOCAL (exp)
3184 /* Don't regard global variables as simple. They may be
3185 allocated in ways unknown to the compiler (shared memory,
3186 #pragma weak, etc). */
3187 && ! TREE_PUBLIC (exp)
3188 && ! DECL_EXTERNAL (exp)
3189 /* Loading a static variable is unduly expensive, but global
3190 registers aren't expensive. */
3191 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3194 /* The following functions are subroutines to fold_range_test and allow it to
3195 try to change a logical combination of comparisons into a range test.
3198 For example, X == 2 || X == 3 || X == 4 || X == 5
3202 is converted to (unsigned) (X - 2) <= 3
3204 We describe each set of comparisons as being either inside or outside
3205 a range, using a variable named like IN_P, and then describe the
3206 range with a lower and upper bound. If one of the bounds is omitted,
3207 it represents either the highest or lowest value of the type.
3209 In the comments below, we represent a range by two numbers in brackets
3210 preceded by a "+" to designate being inside that range, or a "-" to
3211 designate being outside that range, so the condition can be inverted by
3212 flipping the prefix. An omitted bound is represented by a "-". For
3213 example, "- [-, 10]" means being outside the range starting at the lowest
3214 possible value and ending at 10, in other words, being greater than 10.
3215 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3218 We set up things so that the missing bounds are handled in a consistent
3219 manner so neither a missing bound nor "true" and "false" need to be
3220 handled using a special case. */
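/* Standalone check (compile separately) of the example above: the OR of
   the four equality tests equals a single unsigned range test after
   biasing by the low bound.  */
#include <assert.h>
int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    assert ((x == 2 || x == 3 || x == 4 || x == 5)
            == ((unsigned) (x - 2) <= 3));
  return 0;
}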
3222 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3223 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3224 and UPPER1_P are nonzero if the respective argument is an upper bound
3225 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3226 must be specified for a comparison. ARG1 will be converted to ARG0's
3227 type if both are specified. */
3230 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3231 tree arg1, int upper1_p)
3237 /* If neither arg represents infinity, do the normal operation.
3238 Else, if not a comparison, return infinity. Else handle the special
3239 comparison rules. Note that most of the cases below won't occur, but
3240 are handled for consistency. */
3242 if (arg0 != 0 && arg1 != 0)
3244 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3245 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3247 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3250 if (TREE_CODE_CLASS (code) != '<')
3253 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3254 for neither. In real maths, we cannot assume open ended ranges are
3255 the same. But, this is computer arithmetic, where numbers are finite.
3256 We can therefore make the transformation of any unbounded range with
3257 the value Z, Z being greater than any representable number. This permits
3258 us to treat unbounded ranges as equal. */
3259 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3260 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3264 result = sgn0 == sgn1;
3267 result = sgn0 != sgn1;
3270 result = sgn0 < sgn1;
3273 result = sgn0 <= sgn1;
3276 result = sgn0 > sgn1;
3279 result = sgn0 >= sgn1;
3285 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3288 /* Given EXP, a logical expression, set the range it is testing into
3289 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3290 actually being tested. *PLOW and *PHIGH will be made of the same type
3291 as the returned expression. If EXP is not a comparison, we will most
3292 likely not be returning a useful value and range. */
3295 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3297 enum tree_code code;
3298 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3299 tree orig_type = NULL_TREE;
3301 tree low, high, n_low, n_high;
3303 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3304 and see if we can refine the range. Some of the cases below may not
3305 happen, but it doesn't seem worth worrying about this. We "continue"
3306 the outer loop when we've changed something; otherwise we "break"
3307 the switch, which will "break" the while. */
3310 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3314 code = TREE_CODE (exp);
3316 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3318 if (first_rtl_op (code) > 0)
3319 arg0 = TREE_OPERAND (exp, 0);
3320 if (TREE_CODE_CLASS (code) == '<'
3321 || TREE_CODE_CLASS (code) == '1'
3322 || TREE_CODE_CLASS (code) == '2')
3323 type = TREE_TYPE (arg0);
3324 if (TREE_CODE_CLASS (code) == '2'
3325 || TREE_CODE_CLASS (code) == '<'
3326 || (TREE_CODE_CLASS (code) == 'e'
3327 && TREE_CODE_LENGTH (code) > 1))
3328 arg1 = TREE_OPERAND (exp, 1);
3331 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3332 lose a cast by accident. */
3333 if (type != NULL_TREE && orig_type == NULL_TREE)
3338 case TRUTH_NOT_EXPR:
3339 in_p = ! in_p, exp = arg0;
3342 case EQ_EXPR: case NE_EXPR:
3343 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3344 /* We can only do something if the range is testing for zero
3345 and if the second operand is an integer constant. Note that
3346 saying something is "in" the range we make is done by
3347 complementing IN_P since it will be set in the initial case of
3348 being not equal to zero; "out" is leaving it alone. */
3349 if (low == 0 || high == 0
3350 || ! integer_zerop (low) || ! integer_zerop (high)
3351 || TREE_CODE (arg1) != INTEGER_CST)
3356 case NE_EXPR: /* - [c, c] */
3359 case EQ_EXPR: /* + [c, c] */
3360 in_p = ! in_p, low = high = arg1;
3362 case GT_EXPR: /* - [-, c] */
3363 low = 0, high = arg1;
3365 case GE_EXPR: /* + [c, -] */
3366 in_p = ! in_p, low = arg1, high = 0;
3368 case LT_EXPR: /* - [c, -] */
3369 low = arg1, high = 0;
3371 case LE_EXPR: /* + [-, c] */
3372 in_p = ! in_p, low = 0, high = arg1;
3380 /* If this is an unsigned comparison, we also know that EXP is
3381 greater than or equal to zero. We base the range tests we make
3382 on that fact, so we record it here so we can parse existing range tests. */
3384 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3386 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3387 1, fold_convert (type, integer_zero_node),
3391 in_p = n_in_p, low = n_low, high = n_high;
3393 /* If the high bound is missing, but we have a nonzero low
3394 bound, reverse the range so it goes from zero to the low bound minus one. */
3396 if (high == 0 && low && ! integer_zerop (low))
3399 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3400 integer_one_node, 0);
3401 low = fold_convert (type, integer_zero_node);
3407 /* (-x) IN [a,b] -> x in [-b, -a] */
3408 n_low = range_binop (MINUS_EXPR, type,
3409 fold_convert (type, integer_zero_node),
3411 n_high = range_binop (MINUS_EXPR, type,
3412 fold_convert (type, integer_zero_node),
3414 low = n_low, high = n_high;
3420 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3421 fold_convert (type, integer_one_node));
3424 case PLUS_EXPR: case MINUS_EXPR:
3425 if (TREE_CODE (arg1) != INTEGER_CST)
3428 /* If EXP is signed, any overflow in the computation is undefined,
3429 so we don't worry about it so long as our computations on
3430 the bounds don't overflow. For unsigned, overflow is defined
3431 and this is exactly the right thing. */
3432 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3433 type, low, 0, arg1, 0);
3434 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3435 type, high, 1, arg1, 0);
3436 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3437 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3440 /* Check for an unsigned range which has wrapped around the maximum
3441 value thus making n_high < n_low, and normalize it. */
3442 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3444 low = range_binop (PLUS_EXPR, type, n_high, 0,
3445 integer_one_node, 0);
3446 high = range_binop (MINUS_EXPR, type, n_low, 0,
3447 integer_one_node, 0);
3449 /* If the range is of the form +/- [ x+1, x ], we won't
3450 be able to normalize it. But then, it represents the
3451 whole range or the empty set, so make it +/- [-, -]. */
3453 if (tree_int_cst_equal (n_low, low)
3454 && tree_int_cst_equal (n_high, high))
3460 low = n_low, high = n_high;
3465 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3466 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3469 if (! INTEGRAL_TYPE_P (type)
3470 || (low != 0 && ! int_fits_type_p (low, type))
3471 || (high != 0 && ! int_fits_type_p (high, type)))
3474 n_low = low, n_high = high;
3477 n_low = fold_convert (type, n_low);
3480 n_high = fold_convert (type, n_high);
3482 /* If we're converting from an unsigned to a signed type,
3483 we will be doing the comparison as unsigned. The tests above
3484 have already verified that LOW and HIGH are both positive.
3486 So we have to make sure that the original unsigned value will
3487 be interpreted as positive. */
3488 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3490 tree equiv_type = lang_hooks.types.type_for_mode
3491 (TYPE_MODE (type), 1);
3494 /* A range without an upper bound is, naturally, unbounded.
3495 Since convert would have cropped a very large value, use
3496 the max value for the destination type. */
3498 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3499 : TYPE_MAX_VALUE (type);
3501 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3502 high_positive = fold (build (RSHIFT_EXPR, type,
3506 integer_one_node)));
3508 /* If the low bound is specified, "and" the range with the
3509 range for which the original unsigned value will be positive. */
3513 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3514 1, n_low, n_high, 1,
3515 fold_convert (type, integer_zero_node),
3519 in_p = (n_in_p == in_p);
3523 /* Otherwise, "or" the range with the range of the input
3524 that will be interpreted as negative. */
3525 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3526 0, n_low, n_high, 1,
3527 fold_convert (type, integer_zero_node),
3531 in_p = (in_p != n_in_p);
3536 low = n_low, high = n_high;
3546 /* If EXP is a constant, we can evaluate whether this is true or false. */
3547 if (TREE_CODE (exp) == INTEGER_CST)
3549 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3551 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3557 *pin_p = in_p, *plow = low, *phigh = high;
3561 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3562 type, TYPE, return an expression to test if EXP is in (or out of, depending
3563 on IN_P) the range. */
3566 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3568 tree etype = TREE_TYPE (exp);
3572 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3573 return invert_truthvalue (value);
3575 if (low == 0 && high == 0)
3576 return fold_convert (type, integer_one_node);
3579 return fold (build (LE_EXPR, type, exp, high));
3582 return fold (build (GE_EXPR, type, exp, low));
3584 if (operand_equal_p (low, high, 0))
3585 return fold (build (EQ_EXPR, type, exp, low));
3587 if (integer_zerop (low))
3589 if (! TYPE_UNSIGNED (etype))
3591 etype = lang_hooks.types.unsigned_type (etype);
3592 high = fold_convert (etype, high);
3593 exp = fold_convert (etype, exp);
3595 return build_range_check (type, exp, 1, 0, high);
3598 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3599 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3601 unsigned HOST_WIDE_INT lo;
3605 prec = TYPE_PRECISION (etype);
3606 if (prec <= HOST_BITS_PER_WIDE_INT)
3609 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3613 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3614 lo = (unsigned HOST_WIDE_INT) -1;
3617 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3619 if (TYPE_UNSIGNED (etype))
3621 etype = lang_hooks.types.signed_type (etype);
3622 exp = fold_convert (etype, exp);
3624 return fold (build (GT_EXPR, type, exp,
3625 fold_convert (etype, integer_zero_node)));
3629 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3630 && ! TREE_OVERFLOW (value))
3631 return build_range_check (type,
3632 fold (build (MINUS_EXPR, etype, exp, low)),
3633 1, fold_convert (etype, integer_zero_node),
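/* Standalone check (compile separately) of two rewrites above: a
   [LOW, HIGH] test becomes one unsigned compare after subtracting LOW,
   and the [1, 127] test collapses to a signed compare against zero,
   assuming 127 is SCHAR_MAX so the upper bound is vacuous.  */
#include <assert.h>
#include <limits.h>
int
main (void)
{
  int x;
  signed char c;
  for (x = -300; x <= 300; x++)
    assert ((x >= 20 && x <= 30) == ((unsigned) (x - 20) <= 10u));
  for (c = SCHAR_MIN; ; c++)
    {
      assert ((c >= 1 && c <= 127) == (c > 0));
      if (c == SCHAR_MAX)
        break;
    }
  return 0;
}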
3639 /* Given two ranges, see if we can merge them into one. Return 1 if we
3640 can, 0 if we can't. Set the output range into the specified parameters. */
3643 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3644 tree high0, int in1_p, tree low1, tree high1)
3652 int lowequal = ((low0 == 0 && low1 == 0)
3653 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3654 low0, 0, low1, 0)));
3655 int highequal = ((high0 == 0 && high1 == 0)
3656 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3657 high0, 1, high1, 1)));
3659 /* Make range 0 be the range that starts first, or ends last if they
3660 start at the same value. Swap them if it isn't. */
3661 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3664 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3665 high1, 1, high0, 1))))
3667 temp = in0_p, in0_p = in1_p, in1_p = temp;
3668 tem = low0, low0 = low1, low1 = tem;
3669 tem = high0, high0 = high1, high1 = tem;
3672 /* Now flag two cases, whether the ranges are disjoint or whether the
3673 second range is totally subsumed in the first. Note that the tests
3674 below are simplified by the ones above. */
3675 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3676 high0, 1, low1, 0));
3677 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3678 high1, 1, high0, 1));
3680 /* We now have four cases, depending on whether we are including or
3681 excluding the two ranges. */
3684 /* If they don't overlap, the result is false. If the second range
3685 is a subset it is the result. Otherwise, the range is from the start
3686 of the second to the end of the first. */
3688 in_p = 0, low = high = 0;
3690 in_p = 1, low = low1, high = high1;
3692 in_p = 1, low = low1, high = high0;
3695 else if (in0_p && ! in1_p)
3697 /* If they don't overlap, the result is the first range. If they are
3698 equal, the result is false. If the second range is a subset of the
3699 first, and the ranges begin at the same place, we go from just after
3700 the end of the first range to the end of the second. If the second
3701 range is not a subset of the first, or if it is a subset and both
3702 ranges end at the same place, the range starts at the start of the
3703 first range and ends just before the second range.
3704 Otherwise, we can't describe this as a single range. */
3706 in_p = 1, low = low0, high = high0;
3707 else if (lowequal && highequal)
3708 in_p = 0, low = high = 0;
3709 else if (subset && lowequal)
3711 in_p = 1, high = high0;
3712 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3713 integer_one_node, 0);
3715 else if (! subset || highequal)
3717 in_p = 1, low = low0;
3718 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3719 integer_one_node, 0);
3725 else if (! in0_p && in1_p)
3727 /* If they don't overlap, the result is the second range. If the second
3728 is a subset of the first, the result is false. Otherwise,
3729 the range starts just after the first range and ends at the
3730 end of the second. */
3732 in_p = 1, low = low1, high = high1;
3733 else if (subset || highequal)
3734 in_p = 0, low = high = 0;
3737 in_p = 1, high = high1;
3738 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3739 integer_one_node, 0);
3745 /* The case where we are excluding both ranges. Here the complex case
3746 is if they don't overlap. In that case, the only time we have a
3747 range is if they are adjacent. If the second is a subset of the
3748 first, the result is the first. Otherwise, the range to exclude
3749 starts at the beginning of the first range and ends at the end of the second. */
3753 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3754 range_binop (PLUS_EXPR, NULL_TREE,
3756 integer_one_node, 1),
3758 in_p = 0, low = low0, high = high1;
3763 in_p = 0, low = low0, high = high0;
3765 in_p = 0, low = low0, high = high1;
3768 *pin_p = in_p, *plow = low, *phigh = high;
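/* Standalone check (compile separately) of one merge_ranges case: two
   overlapping "in" ranges, [2, 10] and [8, 15], merge into the single
   range [2, 15].  */
#include <assert.h>
int
main (void)
{
  int x;
  for (x = -5; x <= 20; x++)
    assert (((x >= 2 && x <= 10) || (x >= 8 && x <= 15))
            == (x >= 2 && x <= 15));
  return 0;
}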
3772 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3773 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3776 /* EXP is some logical combination of boolean tests. See if we can
3777 merge it into some range test. Return the new tree if so. */
3780 fold_range_test (tree exp)
3782 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3783 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3784 int in0_p, in1_p, in_p;
3785 tree low0, low1, low, high0, high1, high;
3786 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3787 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3790 /* If this is an OR operation, invert both sides; we will invert
3791 again at the end. */
3793 in0_p = ! in0_p, in1_p = ! in1_p;
3795 /* If both expressions are the same, if we can merge the ranges, and we
3796 can build the range test, return it or it inverted. If one of the
3797 ranges is always true or always false, consider it to be the same
3798 expression as the other. */
3799 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3800 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3802 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3804 : rhs != 0 ? rhs : integer_zero_node,
3806 return or_op ? invert_truthvalue (tem) : tem;
3808 /* On machines where the branch cost is expensive, if this is a
3809 short-circuited branch and the underlying object on both sides
3810 is the same, make a non-short-circuit operation. */
3811 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3812 && lhs != 0 && rhs != 0
3813 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3814 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3815 && operand_equal_p (lhs, rhs, 0))
3817 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3818 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3819 which cases we can't do this. */
3820 if (simple_operand_p (lhs))
3821 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3822 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3823 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3824 TREE_OPERAND (exp, 1));
3826 else if (lang_hooks.decls.global_bindings_p () == 0
3827 && ! CONTAINS_PLACEHOLDER_P (lhs))
3829 tree common = save_expr (lhs);
3831 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3832 or_op ? ! in0_p : in0_p,
3834 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3835 or_op ? ! in1_p : in1_p,
3837 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3838 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3839 TREE_TYPE (exp), lhs, rhs);
3846 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3847 bit value. Arrange things so the extra bits will be set to zero if and
3848 only if C is sign-extended to its full width. If MASK is nonzero,
3849 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3852 unextend (tree c, int p, int unsignedp, tree mask)
3854 tree type = TREE_TYPE (c);
3855 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3858 if (p == modesize || unsignedp)
3861 /* We work by getting just the sign bit into the low-order bit, then
3862 into the high-order bit, then sign-extend. We then XOR that value with C. */
3864 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3865 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3867 /* We must use a signed type in order to get an arithmetic right shift.
3868 However, we must also avoid introducing accidental overflows, so that
3869 a subsequent call to integer_zerop will work. Hence we must
3870 do the type conversion here. At this point, the constant is either
3871 zero or one, and the conversion to a signed type can never overflow.
3872 We could get an overflow if this conversion is done anywhere else. */
3873 if (TYPE_UNSIGNED (type))
3874 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3876 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3877 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3879 temp = const_binop (BIT_AND_EXPR, temp,
3880 fold_convert (TREE_TYPE (c), mask), 0);
3881 /* If necessary, convert the type back to match the type of C. */
3882 if (TYPE_UNSIGNED (type))
3883 temp = fold_convert (type, temp);
3885 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
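/* Standalone illustration (compile separately) of sign-extension
   arithmetic equivalent to the shift sequence above: for a P-bit value
   C, (C ^ M) - M with M = 1 << (P-1) sign-extends C to full width.
   This identity is shown only for comparison; unextend itself uses the
   double shift so it can also apply MASK.  */
#include <assert.h>
int
main (void)
{
  int p = 5;
  int m = 1 << (p - 1);
  assert (((0x1e ^ m) - m) == -2);      /* 11110 is -2 as a 5-bit value */
  assert (((0x0e ^ m) - m) == 14);      /* 01110 stays 14 */
  return 0;
}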
3888 /* Find ways of folding logical expressions of LHS and RHS:
3889 Try to merge two comparisons to the same innermost item.
3890 Look for range tests like "ch >= '0' && ch <= '9'".
3891 Look for combinations of simple terms on machines with expensive branches
3892 and evaluate the RHS unconditionally.
3894 For example, if we have p->a == 2 && p->b == 4 and we can make an
3895 object large enough to span both A and B, we can do this with a comparison
3896 against the object ANDed with a mask.
3898 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3899 operations to do this with one comparison.
3901 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3902 function and the one above.
3904 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3905 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3907 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3910 We return the simplified tree or 0 if no optimization is possible. */
3913 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3915 /* If this is the "or" of two comparisons, we can do something if
3916 the comparisons are NE_EXPR. If this is the "and", we can do something
3917 if the comparisons are EQ_EXPR. I.e.,
3918 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3920 WANTED_CODE is this operation code. For single bit fields, we can
3921 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3922 comparison for one-bit fields. */
3924 enum tree_code wanted_code;
3925 enum tree_code lcode, rcode;
3926 tree ll_arg, lr_arg, rl_arg, rr_arg;
3927 tree ll_inner, lr_inner, rl_inner, rr_inner;
3928 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3929 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3930 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3931 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3932 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3933 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3934 enum machine_mode lnmode, rnmode;
3935 tree ll_mask, lr_mask, rl_mask, rr_mask;
3936 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3937 tree l_const, r_const;
3938 tree lntype, rntype, result;
3939 int first_bit, end_bit;
3942 /* Start by getting the comparison codes. Fail if anything is volatile.
3943 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3944 it were surrounded with a NE_EXPR. */
3946 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3949 lcode = TREE_CODE (lhs);
3950 rcode = TREE_CODE (rhs);
3952 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3953 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3955 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3956 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3958 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3961 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3964 ll_arg = TREE_OPERAND (lhs, 0);
3965 lr_arg = TREE_OPERAND (lhs, 1);
3966 rl_arg = TREE_OPERAND (rhs, 0);
3967 rr_arg = TREE_OPERAND (rhs, 1);
3969 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3970 if (simple_operand_p (ll_arg)
3971 && simple_operand_p (lr_arg)
3972 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3976 if (operand_equal_p (ll_arg, rl_arg, 0)
3977 && operand_equal_p (lr_arg, rr_arg, 0))
3979 int lcompcode, rcompcode;
3981 lcompcode = comparison_to_compcode (lcode);
3982 rcompcode = comparison_to_compcode (rcode);
3983 compcode = (code == TRUTH_AND_EXPR)
3984 ? lcompcode & rcompcode
3985 : lcompcode | rcompcode;
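/* Standalone illustration (compile separately) of the merge fold_truthop
   is after: with A and B adjacent in one word, "v.a == 2 && v.b == 4"
   can be tested by a single compare of the containing halfword.  memcpy
   keeps the example endian-neutral.  */
#include <assert.h>
#include <string.h>
struct s { unsigned char a, b; };
int
main (void)
{
  struct s v = { 2, 4 }, want = { 2, 4 };
  unsigned short word = 0, expected = 0;
  memcpy (&word, &v, sizeof v);
  memcpy (&expected, &want, sizeof want);
  assert ((v.a == 2 && v.b == 4) == (word == expected));
  v.b = 5;
  memcpy (&word, &v, sizeof v);
  assert ((v.a == 2 && v.b == 4) == (word == expected));
  return 0;
}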