1c87b03e5Sespie /* Fold a constant sub-tree into a single node for C-compiler
2c87b03e5Sespie    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3c87b03e5Sespie    1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4c87b03e5Sespie
5c87b03e5Sespie This file is part of GCC.
6c87b03e5Sespie
7c87b03e5Sespie GCC is free software; you can redistribute it and/or modify it under
8c87b03e5Sespie the terms of the GNU General Public License as published by the Free
9c87b03e5Sespie Software Foundation; either version 2, or (at your option) any later
10c87b03e5Sespie version.
11c87b03e5Sespie
12c87b03e5Sespie GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13c87b03e5Sespie WARRANTY; without even the implied warranty of MERCHANTABILITY or
14c87b03e5Sespie FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15c87b03e5Sespie for more details.
16c87b03e5Sespie
17c87b03e5Sespie You should have received a copy of the GNU General Public License
18c87b03e5Sespie along with GCC; see the file COPYING. If not, write to the Free
19c87b03e5Sespie Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20c87b03e5Sespie 02111-1307, USA. */
21c87b03e5Sespie
22c87b03e5Sespie /*@@ This file should be rewritten to use an arbitrary precision
23c87b03e5Sespie @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24c87b03e5Sespie @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25c87b03e5Sespie @@ The routines that translate from the ap rep should
26c87b03e5Sespie @@ warn if precision et. al. is lost.
27c87b03e5Sespie @@ This would also make life easier when this technology is used
28c87b03e5Sespie @@ for cross-compilers. */
29c87b03e5Sespie
30c87b03e5Sespie /* The entry points in this file are fold, size_int_wide, size_binop
31c87b03e5Sespie and force_fit_type.
32c87b03e5Sespie
33c87b03e5Sespie fold takes a tree as argument and returns a simplified tree.
34c87b03e5Sespie
35c87b03e5Sespie size_binop takes a tree code for an arithmetic operation
36c87b03e5Sespie and two operands that are trees, and produces a tree for the
37c87b03e5Sespie result, assuming the type comes from `sizetype'.
38c87b03e5Sespie
39c87b03e5Sespie size_int takes an integer value, and creates a tree constant
40c87b03e5Sespie with type from `sizetype'.
41c87b03e5Sespie
42c87b03e5Sespie force_fit_type takes a constant and prior overflow indicator, and
43c87b03e5Sespie forces the value to fit the type. It returns an overflow indicator. */
44c87b03e5Sespie
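/* Illustrative sketch only (an addition, not part of the original sources):
   a rough picture of how the entry points described above are typically
   used.  The trees A and B are assumed to be existing INTEGER_CST nodes of
   `sizetype' built elsewhere.  */
#if 0
{
  tree a, b, sum, scaled;

  sum = size_binop (PLUS_EXPR, a, b);	/* fold A + B in `sizetype' */
  scaled = fold (build (MULT_EXPR, sizetype, sum, size_int (2)));
}
#endif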
45c87b03e5Sespie #include "config.h"
46c87b03e5Sespie #include "system.h"
47c87b03e5Sespie #include "flags.h"
48c87b03e5Sespie #include "tree.h"
49c87b03e5Sespie #include "real.h"
50c87b03e5Sespie #include "rtl.h"
51c87b03e5Sespie #include "expr.h"
52c87b03e5Sespie #include "tm_p.h"
53c87b03e5Sespie #include "toplev.h"
54c87b03e5Sespie #include "ggc.h"
55c87b03e5Sespie #include "hashtab.h"
56c87b03e5Sespie #include "langhooks.h"
57c87b03e5Sespie
58c87b03e5Sespie static void encode PARAMS ((HOST_WIDE_INT *,
59c87b03e5Sespie unsigned HOST_WIDE_INT,
60c87b03e5Sespie HOST_WIDE_INT));
61c87b03e5Sespie static void decode PARAMS ((HOST_WIDE_INT *,
62c87b03e5Sespie unsigned HOST_WIDE_INT *,
63c87b03e5Sespie HOST_WIDE_INT *));
64c87b03e5Sespie static tree negate_expr PARAMS ((tree));
65c87b03e5Sespie static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
66c87b03e5Sespie tree *, int));
67c87b03e5Sespie static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
68c87b03e5Sespie static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
69c87b03e5Sespie static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
70c87b03e5Sespie static hashval_t size_htab_hash PARAMS ((const void *));
71c87b03e5Sespie static int size_htab_eq PARAMS ((const void *, const void *));
72c87b03e5Sespie static tree fold_convert PARAMS ((tree, tree));
73c87b03e5Sespie static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74c87b03e5Sespie static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75c87b03e5Sespie static int comparison_to_compcode PARAMS ((enum tree_code));
76c87b03e5Sespie static enum tree_code compcode_to_comparison PARAMS ((int));
77c87b03e5Sespie static int truth_value_p PARAMS ((enum tree_code));
78c87b03e5Sespie static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
79c87b03e5Sespie static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
80c87b03e5Sespie static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
81c87b03e5Sespie static tree omit_one_operand PARAMS ((tree, tree, tree));
82c87b03e5Sespie static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
83c87b03e5Sespie static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
84c87b03e5Sespie static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
85c87b03e5Sespie static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
86c87b03e5Sespie tree, tree));
87c87b03e5Sespie static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
88c87b03e5Sespie HOST_WIDE_INT *,
89c87b03e5Sespie enum machine_mode *, int *,
90c87b03e5Sespie int *, tree *, tree *));
91c87b03e5Sespie static int all_ones_mask_p PARAMS ((tree, int));
92c87b03e5Sespie static tree sign_bit_p PARAMS ((tree, tree));
93c87b03e5Sespie static int simple_operand_p PARAMS ((tree));
94c87b03e5Sespie static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
95c87b03e5Sespie tree, int));
96c87b03e5Sespie static tree make_range PARAMS ((tree, int *, tree *, tree *));
97c87b03e5Sespie static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
98c87b03e5Sespie static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
99c87b03e5Sespie int, tree, tree));
100c87b03e5Sespie static tree fold_range_test PARAMS ((tree));
101c87b03e5Sespie static tree unextend PARAMS ((tree, int, int, tree));
102c87b03e5Sespie static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
103c87b03e5Sespie static tree optimize_minmax_comparison PARAMS ((tree));
104c87b03e5Sespie static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
105c87b03e5Sespie static tree extract_muldiv_1 PARAMS ((tree, tree, enum tree_code, tree));
106c87b03e5Sespie static tree strip_compound_expr PARAMS ((tree, tree));
107c87b03e5Sespie static int multiple_of_p PARAMS ((tree, tree, tree));
108c87b03e5Sespie static tree constant_boolean_node PARAMS ((int, tree));
109c87b03e5Sespie static int count_cond PARAMS ((tree, int));
110c87b03e5Sespie static tree fold_binary_op_with_conditional_arg
111c87b03e5Sespie PARAMS ((enum tree_code, tree, tree, tree, int));
112c87b03e5Sespie static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
113c87b03e5Sespie
114c87b03e5Sespie /* The following constants represent a bit based encoding of GCC's
115c87b03e5Sespie comparison operators. This encoding simplifies transformations
116c87b03e5Sespie on relational comparison operators, such as AND and OR. */
117c87b03e5Sespie #define COMPCODE_FALSE 0
118c87b03e5Sespie #define COMPCODE_LT 1
119c87b03e5Sespie #define COMPCODE_EQ 2
120c87b03e5Sespie #define COMPCODE_LE 3
121c87b03e5Sespie #define COMPCODE_GT 4
122c87b03e5Sespie #define COMPCODE_NE 5
123c87b03e5Sespie #define COMPCODE_GE 6
124c87b03e5Sespie #define COMPCODE_TRUE 7
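/* Illustrative note (an addition, not original code): the encoding is chosen
   so that combining comparisons corresponds to bitwise operations on the
   codes, e.g. LT | EQ == LE and LE | NE == TRUE.  */
#if 0
{
  int code1 = COMPCODE_LT | COMPCODE_EQ;	/* == COMPCODE_LE (3) */
  int code2 = COMPCODE_LE | COMPCODE_NE;	/* == COMPCODE_TRUE (7) */
}
#endif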
125c87b03e5Sespie
126c87b03e5Sespie /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
127c87b03e5Sespie overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
128c87b03e5Sespie and SUM1. Then this yields nonzero if overflow occurred during the
129c87b03e5Sespie addition.
130c87b03e5Sespie
131c87b03e5Sespie Overflow occurs if A and B have the same sign, but A and SUM differ in
132c87b03e5Sespie sign. Use `^' to test whether signs differ, and `< 0' to isolate the
133c87b03e5Sespie sign. */
134c87b03e5Sespie #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
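/* Worked example (illustrative only, shown with 8-bit values for brevity;
   the real operands are HOST_WIDE_INTs): a = 0x7f and b = 0x01 have the
   same sign, their wrapped sum 0x80 has the opposite sign, so the macro
   reports overflow.  */
#if 0
{
  signed char a = 0x7f, b = 0x01, sum = (signed char) 0x80;
  int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero */
}
#endif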
135c87b03e5Sespie
136c87b03e5Sespie /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
137c87b03e5Sespie We do that by representing the two-word integer in 4 words, with only
138c87b03e5Sespie HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
139c87b03e5Sespie number. The value of the word is LOWPART + HIGHPART * BASE. */
140c87b03e5Sespie
141c87b03e5Sespie #define LOWPART(x) \
142c87b03e5Sespie ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
143c87b03e5Sespie #define HIGHPART(x) \
144c87b03e5Sespie ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
145c87b03e5Sespie #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
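/* Worked example (illustrative only, assuming HOST_BITS_PER_WIDE_INT is 32,
   so BASE == 0x10000): a doubleword piece splits into two halfword digits
   and can be reassembled as LOWPART + HIGHPART * BASE.  */
#if 0
{
  unsigned HOST_WIDE_INT x = 0x12345678;
  unsigned HOST_WIDE_INT lo = LOWPART (x);	/* 0x5678 */
  unsigned HOST_WIDE_INT hi = HIGHPART (x);	/* 0x1234 */
  /* lo + hi * BASE == x.  */
}
#endif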
146c87b03e5Sespie
147c87b03e5Sespie /* Unpack a two-word integer into 4 words.
148c87b03e5Sespie LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
149c87b03e5Sespie WORDS points to the array of HOST_WIDE_INTs. */
150c87b03e5Sespie
151c87b03e5Sespie static void
152c87b03e5Sespie encode (words, low, hi)
153c87b03e5Sespie HOST_WIDE_INT *words;
154c87b03e5Sespie unsigned HOST_WIDE_INT low;
155c87b03e5Sespie HOST_WIDE_INT hi;
156c87b03e5Sespie {
157c87b03e5Sespie words[0] = LOWPART (low);
158c87b03e5Sespie words[1] = HIGHPART (low);
159c87b03e5Sespie words[2] = LOWPART (hi);
160c87b03e5Sespie words[3] = HIGHPART (hi);
161c87b03e5Sespie }
162c87b03e5Sespie
163c87b03e5Sespie /* Pack an array of 4 words into a two-word integer.
164c87b03e5Sespie WORDS points to the array of words.
165c87b03e5Sespie The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
166c87b03e5Sespie
167c87b03e5Sespie static void
168c87b03e5Sespie decode (words, low, hi)
169c87b03e5Sespie HOST_WIDE_INT *words;
170c87b03e5Sespie unsigned HOST_WIDE_INT *low;
171c87b03e5Sespie HOST_WIDE_INT *hi;
172c87b03e5Sespie {
173c87b03e5Sespie *low = words[0] + words[1] * BASE;
174c87b03e5Sespie *hi = words[2] + words[3] * BASE;
175c87b03e5Sespie }
176c87b03e5Sespie
177c87b03e5Sespie /* Make the integer constant T valid for its type by setting to 0 or 1 all
178c87b03e5Sespie the bits in the constant that don't belong in the type.
179c87b03e5Sespie
180c87b03e5Sespie Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
181c87b03e5Sespie nonzero, a signed overflow has already occurred in calculating T, so
182c87b03e5Sespie propagate it. */
183c87b03e5Sespie
184c87b03e5Sespie int
185c87b03e5Sespie force_fit_type (t, overflow)
186c87b03e5Sespie tree t;
187c87b03e5Sespie int overflow;
188c87b03e5Sespie {
189c87b03e5Sespie unsigned HOST_WIDE_INT low;
190c87b03e5Sespie HOST_WIDE_INT high;
191c87b03e5Sespie unsigned int prec;
192c87b03e5Sespie
193c87b03e5Sespie if (TREE_CODE (t) == REAL_CST)
194c87b03e5Sespie {
195c87b03e5Sespie /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
196c87b03e5Sespie Consider doing it via real_convert now. */
197c87b03e5Sespie return overflow;
198c87b03e5Sespie }
199c87b03e5Sespie
200c87b03e5Sespie else if (TREE_CODE (t) != INTEGER_CST)
201c87b03e5Sespie return overflow;
202c87b03e5Sespie
203c87b03e5Sespie low = TREE_INT_CST_LOW (t);
204c87b03e5Sespie high = TREE_INT_CST_HIGH (t);
205c87b03e5Sespie
206c87b03e5Sespie if (POINTER_TYPE_P (TREE_TYPE (t)))
207c87b03e5Sespie prec = POINTER_SIZE;
208c87b03e5Sespie else
209c87b03e5Sespie prec = TYPE_PRECISION (TREE_TYPE (t));
210c87b03e5Sespie
211c87b03e5Sespie /* First clear all bits that are beyond the type's precision. */
212c87b03e5Sespie
213c87b03e5Sespie if (prec == 2 * HOST_BITS_PER_WIDE_INT)
214c87b03e5Sespie ;
215c87b03e5Sespie else if (prec > HOST_BITS_PER_WIDE_INT)
216c87b03e5Sespie TREE_INT_CST_HIGH (t)
217c87b03e5Sespie &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
218c87b03e5Sespie else
219c87b03e5Sespie {
220c87b03e5Sespie TREE_INT_CST_HIGH (t) = 0;
221c87b03e5Sespie if (prec < HOST_BITS_PER_WIDE_INT)
222c87b03e5Sespie TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
223c87b03e5Sespie }
224c87b03e5Sespie
225c87b03e5Sespie /* Unsigned types do not suffer sign extension or overflow unless they
226c87b03e5Sespie are a sizetype. */
227c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (t))
228c87b03e5Sespie && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
229c87b03e5Sespie && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
230c87b03e5Sespie return overflow;
231c87b03e5Sespie
232c87b03e5Sespie /* If the value's sign bit is set, extend the sign. */
233c87b03e5Sespie if (prec != 2 * HOST_BITS_PER_WIDE_INT
234c87b03e5Sespie && (prec > HOST_BITS_PER_WIDE_INT
235c87b03e5Sespie ? 0 != (TREE_INT_CST_HIGH (t)
236c87b03e5Sespie & ((HOST_WIDE_INT) 1
237c87b03e5Sespie << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238c87b03e5Sespie : 0 != (TREE_INT_CST_LOW (t)
239c87b03e5Sespie & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
240c87b03e5Sespie {
241c87b03e5Sespie /* Value is negative:
242c87b03e5Sespie set to 1 all the bits that are outside this type's precision. */
243c87b03e5Sespie if (prec > HOST_BITS_PER_WIDE_INT)
244c87b03e5Sespie TREE_INT_CST_HIGH (t)
245c87b03e5Sespie |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
246c87b03e5Sespie else
247c87b03e5Sespie {
248c87b03e5Sespie TREE_INT_CST_HIGH (t) = -1;
249c87b03e5Sespie if (prec < HOST_BITS_PER_WIDE_INT)
250c87b03e5Sespie TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
251c87b03e5Sespie }
252c87b03e5Sespie }
253c87b03e5Sespie
254c87b03e5Sespie /* Return nonzero if signed overflow occurred. */
255c87b03e5Sespie return
256c87b03e5Sespie ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
257c87b03e5Sespie != 0);
258c87b03e5Sespie }
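/* Worked example (illustrative only, not original code): if T is an
   INTEGER_CST holding 300 whose type is an 8-bit signed type, the bits
   above bit 7 are cleared, leaving 44; the sign bit of the result is clear,
   so no sign extension happens, and a nonzero value is returned because the
   stored value changed.  */
#if 0
{
  tree t;	/* assumed to hold the constant 300 in an 8-bit signed type */
  int overflowed = force_fit_type (t, 0);	/* nonzero; low part now 44 */
}
#endif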
259c87b03e5Sespie
260c87b03e5Sespie /* Add two doubleword integers with doubleword result.
261c87b03e5Sespie Each argument is given as two `HOST_WIDE_INT' pieces.
262c87b03e5Sespie One argument is L1 and H1; the other, L2 and H2.
263c87b03e5Sespie The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
264c87b03e5Sespie
265c87b03e5Sespie int
266c87b03e5Sespie add_double (l1, h1, l2, h2, lv, hv)
267c87b03e5Sespie unsigned HOST_WIDE_INT l1, l2;
268c87b03e5Sespie HOST_WIDE_INT h1, h2;
269c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
270c87b03e5Sespie HOST_WIDE_INT *hv;
271c87b03e5Sespie {
272c87b03e5Sespie unsigned HOST_WIDE_INT l;
273c87b03e5Sespie HOST_WIDE_INT h;
274c87b03e5Sespie
275c87b03e5Sespie l = l1 + l2;
276c87b03e5Sespie h = h1 + h2 + (l < l1);
277c87b03e5Sespie
278c87b03e5Sespie *lv = l;
279c87b03e5Sespie *hv = h;
280c87b03e5Sespie return OVERFLOW_SUM_SIGN (h1, h2, h);
281c87b03e5Sespie }
282c87b03e5Sespie
283c87b03e5Sespie /* Negate a doubleword integer with doubleword result.
284c87b03e5Sespie Return nonzero if the operation overflows, assuming it's signed.
285c87b03e5Sespie The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
286c87b03e5Sespie The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
287c87b03e5Sespie
288c87b03e5Sespie int
289c87b03e5Sespie neg_double (l1, h1, lv, hv)
290c87b03e5Sespie unsigned HOST_WIDE_INT l1;
291c87b03e5Sespie HOST_WIDE_INT h1;
292c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
293c87b03e5Sespie HOST_WIDE_INT *hv;
294c87b03e5Sespie {
295c87b03e5Sespie if (l1 == 0)
296c87b03e5Sespie {
297c87b03e5Sespie *lv = 0;
298c87b03e5Sespie *hv = - h1;
299c87b03e5Sespie return (*hv & h1) < 0;
300c87b03e5Sespie }
301c87b03e5Sespie else
302c87b03e5Sespie {
303c87b03e5Sespie *lv = -l1;
304c87b03e5Sespie *hv = ~h1;
305c87b03e5Sespie return 0;
306c87b03e5Sespie }
307c87b03e5Sespie }
308c87b03e5Sespie
309c87b03e5Sespie /* Multiply two doubleword integers with doubleword result.
310c87b03e5Sespie Return nonzero if the operation overflows, assuming it's signed.
311c87b03e5Sespie Each argument is given as two `HOST_WIDE_INT' pieces.
312c87b03e5Sespie One argument is L1 and H1; the other, L2 and H2.
313c87b03e5Sespie The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
314c87b03e5Sespie
315c87b03e5Sespie int
316c87b03e5Sespie mul_double (l1, h1, l2, h2, lv, hv)
317c87b03e5Sespie unsigned HOST_WIDE_INT l1, l2;
318c87b03e5Sespie HOST_WIDE_INT h1, h2;
319c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
320c87b03e5Sespie HOST_WIDE_INT *hv;
321c87b03e5Sespie {
322c87b03e5Sespie HOST_WIDE_INT arg1[4];
323c87b03e5Sespie HOST_WIDE_INT arg2[4];
324c87b03e5Sespie HOST_WIDE_INT prod[4 * 2];
325c87b03e5Sespie unsigned HOST_WIDE_INT carry;
326c87b03e5Sespie int i, j, k;
327c87b03e5Sespie unsigned HOST_WIDE_INT toplow, neglow;
328c87b03e5Sespie HOST_WIDE_INT tophigh, neghigh;
329c87b03e5Sespie
330c87b03e5Sespie encode (arg1, l1, h1);
331c87b03e5Sespie encode (arg2, l2, h2);
332c87b03e5Sespie
333c87b03e5Sespie memset ((char *) prod, 0, sizeof prod);
334c87b03e5Sespie
335c87b03e5Sespie for (i = 0; i < 4; i++)
336c87b03e5Sespie {
337c87b03e5Sespie carry = 0;
338c87b03e5Sespie for (j = 0; j < 4; j++)
339c87b03e5Sespie {
340c87b03e5Sespie k = i + j;
341c87b03e5Sespie /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
342c87b03e5Sespie carry += arg1[i] * arg2[j];
343c87b03e5Sespie /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
344c87b03e5Sespie carry += prod[k];
345c87b03e5Sespie prod[k] = LOWPART (carry);
346c87b03e5Sespie carry = HIGHPART (carry);
347c87b03e5Sespie }
348c87b03e5Sespie prod[i + 4] = carry;
349c87b03e5Sespie }
350c87b03e5Sespie
351c87b03e5Sespie decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
352c87b03e5Sespie
353c87b03e5Sespie /* Check for overflow by calculating the top half of the answer in full;
354c87b03e5Sespie it should agree with the low half's sign bit. */
355c87b03e5Sespie decode (prod + 4, &toplow, &tophigh);
356c87b03e5Sespie if (h1 < 0)
357c87b03e5Sespie {
358c87b03e5Sespie neg_double (l2, h2, &neglow, &neghigh);
359c87b03e5Sespie add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
360c87b03e5Sespie }
361c87b03e5Sespie if (h2 < 0)
362c87b03e5Sespie {
363c87b03e5Sespie neg_double (l1, h1, &neglow, &neghigh);
364c87b03e5Sespie add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
365c87b03e5Sespie }
366c87b03e5Sespie return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
367c87b03e5Sespie }
368c87b03e5Sespie
369c87b03e5Sespie /* Shift the doubleword integer in L1, H1 left by COUNT places
370c87b03e5Sespie keeping only PREC bits of result.
371c87b03e5Sespie Shift right if COUNT is negative.
372c87b03e5Sespie ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
373c87b03e5Sespie Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374c87b03e5Sespie
375c87b03e5Sespie void
376c87b03e5Sespie lshift_double (l1, h1, count, prec, lv, hv, arith)
377c87b03e5Sespie unsigned HOST_WIDE_INT l1;
378c87b03e5Sespie HOST_WIDE_INT h1, count;
379c87b03e5Sespie unsigned int prec;
380c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
381c87b03e5Sespie HOST_WIDE_INT *hv;
382c87b03e5Sespie int arith;
383c87b03e5Sespie {
384c87b03e5Sespie unsigned HOST_WIDE_INT signmask;
385c87b03e5Sespie
386c87b03e5Sespie if (count < 0)
387c87b03e5Sespie {
388c87b03e5Sespie rshift_double (l1, h1, -count, prec, lv, hv, arith);
389c87b03e5Sespie return;
390c87b03e5Sespie }
391c87b03e5Sespie
392c87b03e5Sespie #ifdef SHIFT_COUNT_TRUNCATED
393c87b03e5Sespie if (SHIFT_COUNT_TRUNCATED)
394c87b03e5Sespie count %= prec;
395c87b03e5Sespie #endif
396c87b03e5Sespie
397c87b03e5Sespie if (count >= 2 * HOST_BITS_PER_WIDE_INT)
398c87b03e5Sespie {
399c87b03e5Sespie /* Shifting by the host word size is undefined according to the
400c87b03e5Sespie ANSI standard, so we must handle this as a special case. */
401c87b03e5Sespie *hv = 0;
402c87b03e5Sespie *lv = 0;
403c87b03e5Sespie }
404c87b03e5Sespie else if (count >= HOST_BITS_PER_WIDE_INT)
405c87b03e5Sespie {
406c87b03e5Sespie *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
407c87b03e5Sespie *lv = 0;
408c87b03e5Sespie }
409c87b03e5Sespie else
410c87b03e5Sespie {
411c87b03e5Sespie *hv = (((unsigned HOST_WIDE_INT) h1 << count)
412c87b03e5Sespie | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
413c87b03e5Sespie *lv = l1 << count;
414c87b03e5Sespie }
415c87b03e5Sespie
416c87b03e5Sespie /* Sign extend all bits that are beyond the precision. */
417c87b03e5Sespie
418c87b03e5Sespie signmask = -((prec > HOST_BITS_PER_WIDE_INT
419c87b03e5Sespie ? ((unsigned HOST_WIDE_INT) *hv
420c87b03e5Sespie >> (prec - HOST_BITS_PER_WIDE_INT - 1))
421c87b03e5Sespie : (*lv >> (prec - 1))) & 1);
422c87b03e5Sespie
423c87b03e5Sespie if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
424c87b03e5Sespie ;
425c87b03e5Sespie else if (prec >= HOST_BITS_PER_WIDE_INT)
426c87b03e5Sespie {
427c87b03e5Sespie *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
428c87b03e5Sespie *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
429c87b03e5Sespie }
430c87b03e5Sespie else
431c87b03e5Sespie {
432c87b03e5Sespie *hv = signmask;
433c87b03e5Sespie *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
434c87b03e5Sespie *lv |= signmask << prec;
435c87b03e5Sespie }
436c87b03e5Sespie }
437c87b03e5Sespie
438c87b03e5Sespie /* Shift the doubleword integer in L1, H1 right by COUNT places
439c87b03e5Sespie keeping only PREC bits of result. COUNT must be positive.
440c87b03e5Sespie ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
441c87b03e5Sespie Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
442c87b03e5Sespie
443c87b03e5Sespie void
444c87b03e5Sespie rshift_double (l1, h1, count, prec, lv, hv, arith)
445c87b03e5Sespie unsigned HOST_WIDE_INT l1;
446c87b03e5Sespie HOST_WIDE_INT h1, count;
447c87b03e5Sespie unsigned int prec;
448c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
449c87b03e5Sespie HOST_WIDE_INT *hv;
450c87b03e5Sespie int arith;
451c87b03e5Sespie {
452c87b03e5Sespie unsigned HOST_WIDE_INT signmask;
453c87b03e5Sespie
454c87b03e5Sespie signmask = (arith
455c87b03e5Sespie ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
456c87b03e5Sespie : 0);
457c87b03e5Sespie
458c87b03e5Sespie #ifdef SHIFT_COUNT_TRUNCATED
459c87b03e5Sespie if (SHIFT_COUNT_TRUNCATED)
460c87b03e5Sespie count %= prec;
461c87b03e5Sespie #endif
462c87b03e5Sespie
463c87b03e5Sespie if (count >= 2 * HOST_BITS_PER_WIDE_INT)
464c87b03e5Sespie {
465c87b03e5Sespie /* Shifting by the host word size is undefined according to the
466c87b03e5Sespie ANSI standard, so we must handle this as a special case. */
467c87b03e5Sespie *hv = 0;
468c87b03e5Sespie *lv = 0;
469c87b03e5Sespie }
470c87b03e5Sespie else if (count >= HOST_BITS_PER_WIDE_INT)
471c87b03e5Sespie {
472c87b03e5Sespie *hv = 0;
473c87b03e5Sespie *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
474c87b03e5Sespie }
475c87b03e5Sespie else
476c87b03e5Sespie {
477c87b03e5Sespie *hv = (unsigned HOST_WIDE_INT) h1 >> count;
478c87b03e5Sespie *lv = ((l1 >> count)
479c87b03e5Sespie | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
480c87b03e5Sespie }
481c87b03e5Sespie
482c87b03e5Sespie /* Zero / sign extend all bits that are beyond the precision. */
483c87b03e5Sespie
484c87b03e5Sespie if (count >= (HOST_WIDE_INT)prec)
485c87b03e5Sespie {
486c87b03e5Sespie *hv = signmask;
487c87b03e5Sespie *lv = signmask;
488c87b03e5Sespie }
489c87b03e5Sespie else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
490c87b03e5Sespie ;
491c87b03e5Sespie else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
492c87b03e5Sespie {
493c87b03e5Sespie *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
494c87b03e5Sespie *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
495c87b03e5Sespie }
496c87b03e5Sespie else
497c87b03e5Sespie {
498c87b03e5Sespie *hv = signmask;
499c87b03e5Sespie *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
500c87b03e5Sespie *lv |= signmask << (prec - count);
501c87b03e5Sespie }
502c87b03e5Sespie }
503c87b03e5Sespie
504c87b03e5Sespie /* Rotate the doubleword integer in L1, H1 left by COUNT places
505c87b03e5Sespie keeping only PREC bits of result.
506c87b03e5Sespie Rotate right if COUNT is negative.
507c87b03e5Sespie Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
508c87b03e5Sespie
509c87b03e5Sespie void
510c87b03e5Sespie lrotate_double (l1, h1, count, prec, lv, hv)
511c87b03e5Sespie unsigned HOST_WIDE_INT l1;
512c87b03e5Sespie HOST_WIDE_INT h1, count;
513c87b03e5Sespie unsigned int prec;
514c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
515c87b03e5Sespie HOST_WIDE_INT *hv;
516c87b03e5Sespie {
517c87b03e5Sespie unsigned HOST_WIDE_INT s1l, s2l;
518c87b03e5Sespie HOST_WIDE_INT s1h, s2h;
519c87b03e5Sespie
520c87b03e5Sespie count %= prec;
521c87b03e5Sespie if (count < 0)
522c87b03e5Sespie count += prec;
523c87b03e5Sespie
524c87b03e5Sespie lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
525c87b03e5Sespie rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
526c87b03e5Sespie *lv = s1l | s2l;
527c87b03e5Sespie *hv = s1h | s2h;
528c87b03e5Sespie }
529c87b03e5Sespie
530c87b03e5Sespie /* Rotate the doubleword integer in L1, H1 right by COUNT places
531c87b03e5Sespie keeping only PREC bits of result. COUNT must be positive.
532c87b03e5Sespie Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
533c87b03e5Sespie
534c87b03e5Sespie void
535c87b03e5Sespie rrotate_double (l1, h1, count, prec, lv, hv)
536c87b03e5Sespie unsigned HOST_WIDE_INT l1;
537c87b03e5Sespie HOST_WIDE_INT h1, count;
538c87b03e5Sespie unsigned int prec;
539c87b03e5Sespie unsigned HOST_WIDE_INT *lv;
540c87b03e5Sespie HOST_WIDE_INT *hv;
541c87b03e5Sespie {
542c87b03e5Sespie unsigned HOST_WIDE_INT s1l, s2l;
543c87b03e5Sespie HOST_WIDE_INT s1h, s2h;
544c87b03e5Sespie
545c87b03e5Sespie count %= prec;
546c87b03e5Sespie if (count < 0)
547c87b03e5Sespie count += prec;
548c87b03e5Sespie
549c87b03e5Sespie rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550c87b03e5Sespie lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551c87b03e5Sespie *lv = s1l | s2l;
552c87b03e5Sespie *hv = s1h | s2h;
553c87b03e5Sespie }
554c87b03e5Sespie
555c87b03e5Sespie /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556c87b03e5Sespie for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557c87b03e5Sespie CODE is a tree code for a kind of division, one of
558c87b03e5Sespie TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
559c87b03e5Sespie or EXACT_DIV_EXPR
560c87b03e5Sespie It controls how the quotient is rounded to an integer.
561c87b03e5Sespie Return nonzero if the operation overflows.
562c87b03e5Sespie UNS nonzero says do unsigned division. */
563c87b03e5Sespie
564c87b03e5Sespie int
565c87b03e5Sespie div_and_round_double (code, uns,
566c87b03e5Sespie lnum_orig, hnum_orig, lden_orig, hden_orig,
567c87b03e5Sespie lquo, hquo, lrem, hrem)
568c87b03e5Sespie enum tree_code code;
569c87b03e5Sespie int uns;
570c87b03e5Sespie unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
571c87b03e5Sespie HOST_WIDE_INT hnum_orig;
572c87b03e5Sespie unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
573c87b03e5Sespie HOST_WIDE_INT hden_orig;
574c87b03e5Sespie unsigned HOST_WIDE_INT *lquo, *lrem;
575c87b03e5Sespie HOST_WIDE_INT *hquo, *hrem;
576c87b03e5Sespie {
577c87b03e5Sespie int quo_neg = 0;
578c87b03e5Sespie HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
579c87b03e5Sespie HOST_WIDE_INT den[4], quo[4];
580c87b03e5Sespie int i, j;
581c87b03e5Sespie unsigned HOST_WIDE_INT work;
582c87b03e5Sespie unsigned HOST_WIDE_INT carry = 0;
583c87b03e5Sespie unsigned HOST_WIDE_INT lnum = lnum_orig;
584c87b03e5Sespie HOST_WIDE_INT hnum = hnum_orig;
585c87b03e5Sespie unsigned HOST_WIDE_INT lden = lden_orig;
586c87b03e5Sespie HOST_WIDE_INT hden = hden_orig;
587c87b03e5Sespie int overflow = 0;
588c87b03e5Sespie
589c87b03e5Sespie if (hden == 0 && lden == 0)
590c87b03e5Sespie overflow = 1, lden = 1;
591c87b03e5Sespie
592c87b03e5Sespie /* calculate quotient sign and convert operands to unsigned. */
593c87b03e5Sespie if (!uns)
594c87b03e5Sespie {
595c87b03e5Sespie if (hnum < 0)
596c87b03e5Sespie {
597c87b03e5Sespie quo_neg = ~ quo_neg;
598c87b03e5Sespie /* (minimum integer) / (-1) is the only overflow case. */
599c87b03e5Sespie if (neg_double (lnum, hnum, &lnum, &hnum)
600c87b03e5Sespie && ((HOST_WIDE_INT) lden & hden) == -1)
601c87b03e5Sespie overflow = 1;
602c87b03e5Sespie }
603c87b03e5Sespie if (hden < 0)
604c87b03e5Sespie {
605c87b03e5Sespie quo_neg = ~ quo_neg;
606c87b03e5Sespie neg_double (lden, hden, &lden, &hden);
607c87b03e5Sespie }
608c87b03e5Sespie }
609c87b03e5Sespie
610c87b03e5Sespie if (hnum == 0 && hden == 0)
611c87b03e5Sespie { /* single precision */
612c87b03e5Sespie *hquo = *hrem = 0;
613c87b03e5Sespie /* This unsigned division rounds toward zero. */
614c87b03e5Sespie *lquo = lnum / lden;
615c87b03e5Sespie goto finish_up;
616c87b03e5Sespie }
617c87b03e5Sespie
618c87b03e5Sespie if (hnum == 0)
619c87b03e5Sespie { /* trivial case: dividend < divisor */
620c87b03e5Sespie /* hden != 0 already checked. */
621c87b03e5Sespie *hquo = *lquo = 0;
622c87b03e5Sespie *hrem = hnum;
623c87b03e5Sespie *lrem = lnum;
624c87b03e5Sespie goto finish_up;
625c87b03e5Sespie }
626c87b03e5Sespie
627c87b03e5Sespie memset ((char *) quo, 0, sizeof quo);
628c87b03e5Sespie
629c87b03e5Sespie memset ((char *) num, 0, sizeof num); /* to zero 9th element */
630c87b03e5Sespie memset ((char *) den, 0, sizeof den);
631c87b03e5Sespie
632c87b03e5Sespie encode (num, lnum, hnum);
633c87b03e5Sespie encode (den, lden, hden);
634c87b03e5Sespie
635c87b03e5Sespie /* Special code for when the divisor < BASE. */
636c87b03e5Sespie if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
637c87b03e5Sespie {
638c87b03e5Sespie /* hnum != 0 already checked. */
639c87b03e5Sespie for (i = 4 - 1; i >= 0; i--)
640c87b03e5Sespie {
641c87b03e5Sespie work = num[i] + carry * BASE;
642c87b03e5Sespie quo[i] = work / lden;
643c87b03e5Sespie carry = work % lden;
644c87b03e5Sespie }
645c87b03e5Sespie }
646c87b03e5Sespie else
647c87b03e5Sespie {
648c87b03e5Sespie /* Full double precision division,
649c87b03e5Sespie with thanks to Don Knuth's "Seminumerical Algorithms". */
650c87b03e5Sespie int num_hi_sig, den_hi_sig;
651c87b03e5Sespie unsigned HOST_WIDE_INT quo_est, scale;
652c87b03e5Sespie
653c87b03e5Sespie /* Find the highest nonzero divisor digit. */
654c87b03e5Sespie for (i = 4 - 1;; i--)
655c87b03e5Sespie if (den[i] != 0)
656c87b03e5Sespie {
657c87b03e5Sespie den_hi_sig = i;
658c87b03e5Sespie break;
659c87b03e5Sespie }
660c87b03e5Sespie
661c87b03e5Sespie       /* Ensure that the first digit of the divisor is at least BASE/2.
662c87b03e5Sespie This is required by the quotient digit estimation algorithm. */
663c87b03e5Sespie
664c87b03e5Sespie scale = BASE / (den[den_hi_sig] + 1);
665c87b03e5Sespie if (scale > 1)
666c87b03e5Sespie { /* scale divisor and dividend */
667c87b03e5Sespie carry = 0;
668c87b03e5Sespie for (i = 0; i <= 4 - 1; i++)
669c87b03e5Sespie {
670c87b03e5Sespie work = (num[i] * scale) + carry;
671c87b03e5Sespie num[i] = LOWPART (work);
672c87b03e5Sespie carry = HIGHPART (work);
673c87b03e5Sespie }
674c87b03e5Sespie
675c87b03e5Sespie num[4] = carry;
676c87b03e5Sespie carry = 0;
677c87b03e5Sespie for (i = 0; i <= 4 - 1; i++)
678c87b03e5Sespie {
679c87b03e5Sespie work = (den[i] * scale) + carry;
680c87b03e5Sespie den[i] = LOWPART (work);
681c87b03e5Sespie carry = HIGHPART (work);
682c87b03e5Sespie if (den[i] != 0) den_hi_sig = i;
683c87b03e5Sespie }
684c87b03e5Sespie }
685c87b03e5Sespie
686c87b03e5Sespie num_hi_sig = 4;
687c87b03e5Sespie
688c87b03e5Sespie /* Main loop */
689c87b03e5Sespie for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
690c87b03e5Sespie {
691c87b03e5Sespie /* Guess the next quotient digit, quo_est, by dividing the first
692c87b03e5Sespie two remaining dividend digits by the high order quotient digit.
693c87b03e5Sespie quo_est is never low and is at most 2 high. */
694c87b03e5Sespie unsigned HOST_WIDE_INT tmp;
695c87b03e5Sespie
696c87b03e5Sespie num_hi_sig = i + den_hi_sig + 1;
697c87b03e5Sespie work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
698c87b03e5Sespie if (num[num_hi_sig] != den[den_hi_sig])
699c87b03e5Sespie quo_est = work / den[den_hi_sig];
700c87b03e5Sespie else
701c87b03e5Sespie quo_est = BASE - 1;
702c87b03e5Sespie
703c87b03e5Sespie /* Refine quo_est so it's usually correct, and at most one high. */
704c87b03e5Sespie tmp = work - quo_est * den[den_hi_sig];
705c87b03e5Sespie if (tmp < BASE
706c87b03e5Sespie && (den[den_hi_sig - 1] * quo_est
707c87b03e5Sespie > (tmp * BASE + num[num_hi_sig - 2])))
708c87b03e5Sespie quo_est--;
709c87b03e5Sespie
710c87b03e5Sespie /* Try QUO_EST as the quotient digit, by multiplying the
711c87b03e5Sespie divisor by QUO_EST and subtracting from the remaining dividend.
712c87b03e5Sespie Keep in mind that QUO_EST is the I - 1st digit. */
713c87b03e5Sespie
714c87b03e5Sespie carry = 0;
715c87b03e5Sespie for (j = 0; j <= den_hi_sig; j++)
716c87b03e5Sespie {
717c87b03e5Sespie work = quo_est * den[j] + carry;
718c87b03e5Sespie carry = HIGHPART (work);
719c87b03e5Sespie work = num[i + j] - LOWPART (work);
720c87b03e5Sespie num[i + j] = LOWPART (work);
721c87b03e5Sespie carry += HIGHPART (work) != 0;
722c87b03e5Sespie }
723c87b03e5Sespie
724c87b03e5Sespie /* If quo_est was high by one, then num[i] went negative and
725c87b03e5Sespie we need to correct things. */
726c87b03e5Sespie if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
727c87b03e5Sespie {
728c87b03e5Sespie quo_est--;
729c87b03e5Sespie carry = 0; /* add divisor back in */
730c87b03e5Sespie for (j = 0; j <= den_hi_sig; j++)
731c87b03e5Sespie {
732c87b03e5Sespie work = num[i + j] + den[j] + carry;
733c87b03e5Sespie carry = HIGHPART (work);
734c87b03e5Sespie num[i + j] = LOWPART (work);
735c87b03e5Sespie }
736c87b03e5Sespie
737c87b03e5Sespie num [num_hi_sig] += carry;
738c87b03e5Sespie }
739c87b03e5Sespie
740c87b03e5Sespie /* Store the quotient digit. */
741c87b03e5Sespie quo[i] = quo_est;
742c87b03e5Sespie }
743c87b03e5Sespie }
744c87b03e5Sespie
745c87b03e5Sespie decode (quo, lquo, hquo);
746c87b03e5Sespie
747c87b03e5Sespie finish_up:
748c87b03e5Sespie /* if result is negative, make it so. */
749c87b03e5Sespie if (quo_neg)
750c87b03e5Sespie neg_double (*lquo, *hquo, lquo, hquo);
751c87b03e5Sespie
752c87b03e5Sespie /* compute trial remainder: rem = num - (quo * den) */
753c87b03e5Sespie mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
754c87b03e5Sespie neg_double (*lrem, *hrem, lrem, hrem);
755c87b03e5Sespie add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
756c87b03e5Sespie
757c87b03e5Sespie switch (code)
758c87b03e5Sespie {
759c87b03e5Sespie case TRUNC_DIV_EXPR:
760c87b03e5Sespie case TRUNC_MOD_EXPR: /* round toward zero */
761c87b03e5Sespie case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762c87b03e5Sespie return overflow;
763c87b03e5Sespie
764c87b03e5Sespie case FLOOR_DIV_EXPR:
765c87b03e5Sespie case FLOOR_MOD_EXPR: /* round toward negative infinity */
766c87b03e5Sespie if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
767c87b03e5Sespie {
768c87b03e5Sespie /* quo = quo - 1; */
769c87b03e5Sespie add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
770c87b03e5Sespie lquo, hquo);
771c87b03e5Sespie }
772c87b03e5Sespie else
773c87b03e5Sespie return overflow;
774c87b03e5Sespie break;
775c87b03e5Sespie
776c87b03e5Sespie case CEIL_DIV_EXPR:
777c87b03e5Sespie case CEIL_MOD_EXPR: /* round toward positive infinity */
778c87b03e5Sespie if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
779c87b03e5Sespie {
780c87b03e5Sespie add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
781c87b03e5Sespie lquo, hquo);
782c87b03e5Sespie }
783c87b03e5Sespie else
784c87b03e5Sespie return overflow;
785c87b03e5Sespie break;
786c87b03e5Sespie
787c87b03e5Sespie case ROUND_DIV_EXPR:
788c87b03e5Sespie case ROUND_MOD_EXPR: /* round to closest integer */
789c87b03e5Sespie {
790c87b03e5Sespie unsigned HOST_WIDE_INT labs_rem = *lrem;
791c87b03e5Sespie HOST_WIDE_INT habs_rem = *hrem;
792c87b03e5Sespie unsigned HOST_WIDE_INT labs_den = lden, ltwice;
793c87b03e5Sespie HOST_WIDE_INT habs_den = hden, htwice;
794c87b03e5Sespie
795c87b03e5Sespie /* Get absolute values */
796c87b03e5Sespie if (*hrem < 0)
797c87b03e5Sespie neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
798c87b03e5Sespie if (hden < 0)
799c87b03e5Sespie neg_double (lden, hden, &labs_den, &habs_den);
800c87b03e5Sespie
801c87b03e5Sespie /* If (2 * abs (lrem) >= abs (lden)) */
802c87b03e5Sespie mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
803c87b03e5Sespie labs_rem, habs_rem, <wice, &htwice);
804c87b03e5Sespie
805c87b03e5Sespie if (((unsigned HOST_WIDE_INT) habs_den
806c87b03e5Sespie < (unsigned HOST_WIDE_INT) htwice)
807c87b03e5Sespie || (((unsigned HOST_WIDE_INT) habs_den
808c87b03e5Sespie == (unsigned HOST_WIDE_INT) htwice)
809c87b03e5Sespie && (labs_den < ltwice)))
810c87b03e5Sespie {
811c87b03e5Sespie if (*hquo < 0)
812c87b03e5Sespie /* quo = quo - 1; */
813c87b03e5Sespie add_double (*lquo, *hquo,
814c87b03e5Sespie (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
815c87b03e5Sespie else
816c87b03e5Sespie /* quo = quo + 1; */
817c87b03e5Sespie add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
818c87b03e5Sespie lquo, hquo);
819c87b03e5Sespie }
820c87b03e5Sespie else
821c87b03e5Sespie return overflow;
822c87b03e5Sespie }
823c87b03e5Sespie break;
824c87b03e5Sespie
825c87b03e5Sespie default:
826c87b03e5Sespie abort ();
827c87b03e5Sespie }
828c87b03e5Sespie
829c87b03e5Sespie /* compute true remainder: rem = num - (quo * den) */
830c87b03e5Sespie mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
831c87b03e5Sespie neg_double (*lrem, *hrem, lrem, hrem);
832c87b03e5Sespie add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833c87b03e5Sespie return overflow;
834c87b03e5Sespie }
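/* Illustrative example (an addition, not original code): how the rounding
   codes above differ for -7 divided by 2.  TRUNC_DIV_EXPR gives -3 with
   remainder -1, FLOOR_DIV_EXPR gives -4 with remainder 1, CEIL_DIV_EXPR
   gives -3 with remainder -1, and ROUND_DIV_EXPR gives -4 because the exact
   ratio -3.5 is rounded away from zero.  */
#if 0
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lquo, &hquo, &lrem, &hrem);
  /* Quotient is -4 and remainder is 1, so that -4 * 2 + 1 == -7.  */
}
#endif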
835c87b03e5Sespie
836c87b03e5Sespie /* Given T, an expression, return the negation of T. Allow for T to be
837c87b03e5Sespie null, in which case return null. */
838c87b03e5Sespie
839c87b03e5Sespie static tree
840c87b03e5Sespie negate_expr (t)
841c87b03e5Sespie tree t;
842c87b03e5Sespie {
843c87b03e5Sespie tree type;
844c87b03e5Sespie tree tem;
845c87b03e5Sespie
846c87b03e5Sespie if (t == 0)
847c87b03e5Sespie return 0;
848c87b03e5Sespie
849c87b03e5Sespie type = TREE_TYPE (t);
850c87b03e5Sespie STRIP_SIGN_NOPS (t);
851c87b03e5Sespie
852c87b03e5Sespie switch (TREE_CODE (t))
853c87b03e5Sespie {
854c87b03e5Sespie case INTEGER_CST:
855c87b03e5Sespie case REAL_CST:
856c87b03e5Sespie if (! TREE_UNSIGNED (type)
857c87b03e5Sespie && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
858c87b03e5Sespie && ! TREE_OVERFLOW (tem))
859c87b03e5Sespie return tem;
860c87b03e5Sespie break;
861c87b03e5Sespie
862c87b03e5Sespie case NEGATE_EXPR:
863c87b03e5Sespie return convert (type, TREE_OPERAND (t, 0));
864c87b03e5Sespie
865c87b03e5Sespie case MINUS_EXPR:
866c87b03e5Sespie /* - (A - B) -> B - A */
867c87b03e5Sespie if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
868c87b03e5Sespie return convert (type,
869c87b03e5Sespie fold (build (MINUS_EXPR, TREE_TYPE (t),
870c87b03e5Sespie TREE_OPERAND (t, 1),
871c87b03e5Sespie TREE_OPERAND (t, 0))));
872c87b03e5Sespie break;
873c87b03e5Sespie
874c87b03e5Sespie default:
875c87b03e5Sespie break;
876c87b03e5Sespie }
877c87b03e5Sespie
878c87b03e5Sespie return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
879c87b03e5Sespie }
880c87b03e5Sespie
881c87b03e5Sespie /* Split a tree IN into a constant, literal and variable parts that could be
882c87b03e5Sespie combined with CODE to make IN. "constant" means an expression with
883c87b03e5Sespie TREE_CONSTANT but that isn't an actual constant. CODE must be a
884c87b03e5Sespie commutative arithmetic operation. Store the constant part into *CONP,
885c87b03e5Sespie the literal in *LITP and return the variable part. If a part isn't
886c87b03e5Sespie present, set it to null. If the tree does not decompose in this way,
887c87b03e5Sespie return the entire tree as the variable part and the other parts as null.
888c87b03e5Sespie
889c87b03e5Sespie If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
890c87b03e5Sespie case, we negate an operand that was subtracted. Except if it is a
891c87b03e5Sespie literal for which we use *MINUS_LITP instead.
892c87b03e5Sespie
893c87b03e5Sespie If NEGATE_P is true, we are negating all of IN, again except a literal
894c87b03e5Sespie for which we use *MINUS_LITP instead.
895c87b03e5Sespie
896c87b03e5Sespie If IN is itself a literal or constant, return it as appropriate.
897c87b03e5Sespie
898c87b03e5Sespie Note that we do not guarantee that any of the three values will be the
899c87b03e5Sespie same type as IN, but they will have the same signedness and mode. */
900c87b03e5Sespie
901c87b03e5Sespie static tree
902c87b03e5Sespie split_tree (in, code, conp, litp, minus_litp, negate_p)
903c87b03e5Sespie tree in;
904c87b03e5Sespie enum tree_code code;
905c87b03e5Sespie tree *conp, *litp, *minus_litp;
906c87b03e5Sespie int negate_p;
907c87b03e5Sespie {
908c87b03e5Sespie tree var = 0;
909c87b03e5Sespie
910c87b03e5Sespie *conp = 0;
911c87b03e5Sespie *litp = 0;
912c87b03e5Sespie *minus_litp = 0;
913c87b03e5Sespie
914c87b03e5Sespie /* Strip any conversions that don't change the machine mode or signedness. */
915c87b03e5Sespie STRIP_SIGN_NOPS (in);
916c87b03e5Sespie
917c87b03e5Sespie if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
918c87b03e5Sespie *litp = in;
919c87b03e5Sespie else if (TREE_CODE (in) == code
920c87b03e5Sespie || (! FLOAT_TYPE_P (TREE_TYPE (in))
921c87b03e5Sespie /* We can associate addition and subtraction together (even
922c87b03e5Sespie though the C standard doesn't say so) for integers because
923c87b03e5Sespie the value is not affected. For reals, the value might be
924c87b03e5Sespie affected, so we can't. */
925c87b03e5Sespie && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
926c87b03e5Sespie || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
927c87b03e5Sespie {
928c87b03e5Sespie tree op0 = TREE_OPERAND (in, 0);
929c87b03e5Sespie tree op1 = TREE_OPERAND (in, 1);
930c87b03e5Sespie int neg1_p = TREE_CODE (in) == MINUS_EXPR;
931c87b03e5Sespie int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
932c87b03e5Sespie
933c87b03e5Sespie /* First see if either of the operands is a literal, then a constant. */
934c87b03e5Sespie if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
935c87b03e5Sespie *litp = op0, op0 = 0;
936c87b03e5Sespie else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
937c87b03e5Sespie *litp = op1, neg_litp_p = neg1_p, op1 = 0;
938c87b03e5Sespie
939c87b03e5Sespie if (op0 != 0 && TREE_CONSTANT (op0))
940c87b03e5Sespie *conp = op0, op0 = 0;
941c87b03e5Sespie else if (op1 != 0 && TREE_CONSTANT (op1))
942c87b03e5Sespie *conp = op1, neg_conp_p = neg1_p, op1 = 0;
943c87b03e5Sespie
944c87b03e5Sespie /* If we haven't dealt with either operand, this is not a case we can
945c87b03e5Sespie decompose. Otherwise, VAR is either of the ones remaining, if any. */
946c87b03e5Sespie if (op0 != 0 && op1 != 0)
947c87b03e5Sespie var = in;
948c87b03e5Sespie else if (op0 != 0)
949c87b03e5Sespie var = op0;
950c87b03e5Sespie else
951c87b03e5Sespie var = op1, neg_var_p = neg1_p;
952c87b03e5Sespie
953c87b03e5Sespie /* Now do any needed negations. */
954c87b03e5Sespie if (neg_litp_p)
955c87b03e5Sespie *minus_litp = *litp, *litp = 0;
956c87b03e5Sespie if (neg_conp_p)
957c87b03e5Sespie *conp = negate_expr (*conp);
958c87b03e5Sespie if (neg_var_p)
959c87b03e5Sespie var = negate_expr (var);
960c87b03e5Sespie }
961c87b03e5Sespie else if (TREE_CONSTANT (in))
962c87b03e5Sespie *conp = in;
963c87b03e5Sespie else
964c87b03e5Sespie var = in;
965c87b03e5Sespie
966c87b03e5Sespie if (negate_p)
967c87b03e5Sespie {
968c87b03e5Sespie if (*litp)
969c87b03e5Sespie *minus_litp = *litp, *litp = 0;
970c87b03e5Sespie else if (*minus_litp)
971c87b03e5Sespie *litp = *minus_litp, *minus_litp = 0;
972c87b03e5Sespie *conp = negate_expr (*conp);
973c87b03e5Sespie var = negate_expr (var);
974c87b03e5Sespie }
975c87b03e5Sespie
976c87b03e5Sespie return var;
977c87b03e5Sespie }
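/* Illustrative example (an addition, not original code): splitting the tree
   for "x - 5" with CODE == PLUS_EXPR returns X as the variable part, leaves
   *CONP and *LITP null, and stores the literal 5 in *MINUS_LITP, because the
   literal was subtracted.  */
#if 0
{
  tree in;	/* assumed to hold a MINUS_EXPR for "x - 5" */
  tree var, con, lit, minus_lit;

  var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == x, con == NULL, lit == NULL, minus_lit == 5.  */
}
#endif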
978c87b03e5Sespie
979c87b03e5Sespie /* Re-associate trees split by the above function. T1 and T2 are either
980c87b03e5Sespie expressions to associate or null. Return the new expression, if any. If
981c87b03e5Sespie we build an operation, do it in TYPE and with CODE. */
982c87b03e5Sespie
983c87b03e5Sespie static tree
984c87b03e5Sespie associate_trees (t1, t2, code, type)
985c87b03e5Sespie tree t1, t2;
986c87b03e5Sespie enum tree_code code;
987c87b03e5Sespie tree type;
988c87b03e5Sespie {
989c87b03e5Sespie if (t1 == 0)
990c87b03e5Sespie return t2;
991c87b03e5Sespie else if (t2 == 0)
992c87b03e5Sespie return t1;
993c87b03e5Sespie
994c87b03e5Sespie /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
995c87b03e5Sespie try to fold this since we will have infinite recursion. But do
996c87b03e5Sespie deal with any NEGATE_EXPRs. */
997c87b03e5Sespie if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
998c87b03e5Sespie || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
999c87b03e5Sespie {
1000c87b03e5Sespie if (code == PLUS_EXPR)
1001c87b03e5Sespie {
1002c87b03e5Sespie if (TREE_CODE (t1) == NEGATE_EXPR)
1003c87b03e5Sespie return build (MINUS_EXPR, type, convert (type, t2),
1004c87b03e5Sespie convert (type, TREE_OPERAND (t1, 0)));
1005c87b03e5Sespie else if (TREE_CODE (t2) == NEGATE_EXPR)
1006c87b03e5Sespie return build (MINUS_EXPR, type, convert (type, t1),
1007c87b03e5Sespie convert (type, TREE_OPERAND (t2, 0)));
1008c87b03e5Sespie }
1009c87b03e5Sespie return build (code, type, convert (type, t1), convert (type, t2));
1010c87b03e5Sespie }
1011c87b03e5Sespie
1012c87b03e5Sespie return fold (build (code, type, convert (type, t1), convert (type, t2)));
1013c87b03e5Sespie }
1014c87b03e5Sespie
1015c87b03e5Sespie /* Combine two integer constants ARG1 and ARG2 under operation CODE
1016c87b03e5Sespie to produce a new constant.
1017c87b03e5Sespie
1018c87b03e5Sespie If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1019c87b03e5Sespie
1020c87b03e5Sespie static tree
1021c87b03e5Sespie int_const_binop (code, arg1, arg2, notrunc)
1022c87b03e5Sespie enum tree_code code;
1023c87b03e5Sespie tree arg1, arg2;
1024c87b03e5Sespie int notrunc;
1025c87b03e5Sespie {
1026c87b03e5Sespie unsigned HOST_WIDE_INT int1l, int2l;
1027c87b03e5Sespie HOST_WIDE_INT int1h, int2h;
1028c87b03e5Sespie unsigned HOST_WIDE_INT low;
1029c87b03e5Sespie HOST_WIDE_INT hi;
1030c87b03e5Sespie unsigned HOST_WIDE_INT garbagel;
1031c87b03e5Sespie HOST_WIDE_INT garbageh;
1032c87b03e5Sespie tree t;
1033c87b03e5Sespie tree type = TREE_TYPE (arg1);
1034c87b03e5Sespie int uns = TREE_UNSIGNED (type);
1035c87b03e5Sespie int is_sizetype
1036c87b03e5Sespie = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1037c87b03e5Sespie int overflow = 0;
1038c87b03e5Sespie int no_overflow = 0;
1039cd3be6e5Savsm int sizeof_flag = 0;
1040cd3be6e5Savsm
1041cd3be6e5Savsm if (SIZEOF_PTR_DERIVED (arg1) == 1 || SIZEOF_PTR_DERIVED (arg2) == 1)
1042cd3be6e5Savsm sizeof_flag = 1;
1043c87b03e5Sespie
1044c87b03e5Sespie int1l = TREE_INT_CST_LOW (arg1);
1045c87b03e5Sespie int1h = TREE_INT_CST_HIGH (arg1);
1046c87b03e5Sespie int2l = TREE_INT_CST_LOW (arg2);
1047c87b03e5Sespie int2h = TREE_INT_CST_HIGH (arg2);
1048c87b03e5Sespie
1049c87b03e5Sespie switch (code)
1050c87b03e5Sespie {
1051c87b03e5Sespie case BIT_IOR_EXPR:
1052c87b03e5Sespie low = int1l | int2l, hi = int1h | int2h;
1053c87b03e5Sespie break;
1054c87b03e5Sespie
1055c87b03e5Sespie case BIT_XOR_EXPR:
1056c87b03e5Sespie low = int1l ^ int2l, hi = int1h ^ int2h;
1057c87b03e5Sespie break;
1058c87b03e5Sespie
1059c87b03e5Sespie case BIT_AND_EXPR:
1060c87b03e5Sespie low = int1l & int2l, hi = int1h & int2h;
1061c87b03e5Sespie break;
1062c87b03e5Sespie
1063c87b03e5Sespie case BIT_ANDTC_EXPR:
1064c87b03e5Sespie low = int1l & ~int2l, hi = int1h & ~int2h;
1065c87b03e5Sespie break;
1066c87b03e5Sespie
1067c87b03e5Sespie case RSHIFT_EXPR:
1068c87b03e5Sespie int2l = -int2l;
1069c87b03e5Sespie case LSHIFT_EXPR:
1070c87b03e5Sespie /* It's unclear from the C standard whether shifts can overflow.
1071c87b03e5Sespie The following code ignores overflow; perhaps a C standard
1072c87b03e5Sespie interpretation ruling is needed. */
1073c87b03e5Sespie lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1074c87b03e5Sespie &low, &hi, !uns);
1075c87b03e5Sespie no_overflow = 1;
1076c87b03e5Sespie break;
1077c87b03e5Sespie
1078c87b03e5Sespie case RROTATE_EXPR:
1079c87b03e5Sespie int2l = - int2l;
1080c87b03e5Sespie case LROTATE_EXPR:
1081c87b03e5Sespie lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1082c87b03e5Sespie &low, &hi);
1083c87b03e5Sespie break;
1084c87b03e5Sespie
1085c87b03e5Sespie case PLUS_EXPR:
1086c87b03e5Sespie overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1087c87b03e5Sespie break;
1088c87b03e5Sespie
1089c87b03e5Sespie case MINUS_EXPR:
1090c87b03e5Sespie neg_double (int2l, int2h, &low, &hi);
1091c87b03e5Sespie add_double (int1l, int1h, low, hi, &low, &hi);
1092c87b03e5Sespie overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1093c87b03e5Sespie break;
1094c87b03e5Sespie
1095c87b03e5Sespie case MULT_EXPR:
1096c87b03e5Sespie overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1097c87b03e5Sespie break;
1098c87b03e5Sespie
1099c87b03e5Sespie case TRUNC_DIV_EXPR:
1100c87b03e5Sespie case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1101c87b03e5Sespie case EXACT_DIV_EXPR:
1102c87b03e5Sespie /* This is a shortcut for a common special case. */
1103c87b03e5Sespie if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1104c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg1)
1105c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg2)
1106c87b03e5Sespie && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1107c87b03e5Sespie {
1108c87b03e5Sespie if (code == CEIL_DIV_EXPR)
1109c87b03e5Sespie int1l += int2l - 1;
1110c87b03e5Sespie
1111c87b03e5Sespie low = int1l / int2l, hi = 0;
1112c87b03e5Sespie break;
1113c87b03e5Sespie }
1114c87b03e5Sespie
1115c87b03e5Sespie /* ... fall through ... */
1116c87b03e5Sespie
1117c87b03e5Sespie case ROUND_DIV_EXPR:
1118c87b03e5Sespie if (int2h == 0 && int2l == 1)
1119c87b03e5Sespie {
1120c87b03e5Sespie low = int1l, hi = int1h;
1121c87b03e5Sespie break;
1122c87b03e5Sespie }
1123c87b03e5Sespie if (int1l == int2l && int1h == int2h
1124c87b03e5Sespie && ! (int1l == 0 && int1h == 0))
1125c87b03e5Sespie {
1126c87b03e5Sespie low = 1, hi = 0;
1127c87b03e5Sespie break;
1128c87b03e5Sespie }
1129c87b03e5Sespie overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1130c87b03e5Sespie &low, &hi, &garbagel, &garbageh);
1131c87b03e5Sespie break;
1132c87b03e5Sespie
1133c87b03e5Sespie case TRUNC_MOD_EXPR:
1134c87b03e5Sespie case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1135c87b03e5Sespie /* This is a shortcut for a common special case. */
1136c87b03e5Sespie if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1137c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg1)
1138c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg2)
1139c87b03e5Sespie && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1140c87b03e5Sespie {
1141c87b03e5Sespie if (code == CEIL_MOD_EXPR)
1142c87b03e5Sespie int1l += int2l - 1;
1143c87b03e5Sespie low = int1l % int2l, hi = 0;
1144c87b03e5Sespie break;
1145c87b03e5Sespie }
1146c87b03e5Sespie
1147c87b03e5Sespie /* ... fall through ... */
1148c87b03e5Sespie
1149c87b03e5Sespie case ROUND_MOD_EXPR:
1150c87b03e5Sespie overflow = div_and_round_double (code, uns,
1151c87b03e5Sespie int1l, int1h, int2l, int2h,
1152c87b03e5Sespie &garbagel, &garbageh, &low, &hi);
1153c87b03e5Sespie break;
1154c87b03e5Sespie
1155c87b03e5Sespie case MIN_EXPR:
1156c87b03e5Sespie case MAX_EXPR:
1157c87b03e5Sespie if (uns)
1158c87b03e5Sespie low = (((unsigned HOST_WIDE_INT) int1h
1159c87b03e5Sespie < (unsigned HOST_WIDE_INT) int2h)
1160c87b03e5Sespie || (((unsigned HOST_WIDE_INT) int1h
1161c87b03e5Sespie == (unsigned HOST_WIDE_INT) int2h)
1162c87b03e5Sespie && int1l < int2l));
1163c87b03e5Sespie else
1164c87b03e5Sespie low = (int1h < int2h
1165c87b03e5Sespie || (int1h == int2h && int1l < int2l));
1166c87b03e5Sespie
1167c87b03e5Sespie if (low == (code == MIN_EXPR))
1168c87b03e5Sespie low = int1l, hi = int1h;
1169c87b03e5Sespie else
1170c87b03e5Sespie low = int2l, hi = int2h;
1171c87b03e5Sespie break;
1172c87b03e5Sespie
1173c87b03e5Sespie default:
1174c87b03e5Sespie abort ();
1175c87b03e5Sespie }
1176c87b03e5Sespie
1177c87b03e5Sespie /* If this is for a sizetype, can be represented as one (signed)
1178c87b03e5Sespie HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1179c87b03e5Sespie constants. */
1180c87b03e5Sespie if (is_sizetype
1181c87b03e5Sespie && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1182c87b03e5Sespie || (hi == -1 && (HOST_WIDE_INT) low < 0))
1183c87b03e5Sespie && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1184c87b03e5Sespie return size_int_type_wide (low, type);
1185c87b03e5Sespie else
1186c87b03e5Sespie {
1187c87b03e5Sespie t = build_int_2 (low, hi);
1188c87b03e5Sespie TREE_TYPE (t) = TREE_TYPE (arg1);
1189c87b03e5Sespie }
1190c87b03e5Sespie
1191c87b03e5Sespie TREE_OVERFLOW (t)
1192c87b03e5Sespie = ((notrunc
1193c87b03e5Sespie ? (!uns || is_sizetype) && overflow
1194c87b03e5Sespie : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1195c87b03e5Sespie && ! no_overflow))
1196c87b03e5Sespie | TREE_OVERFLOW (arg1)
1197c87b03e5Sespie | TREE_OVERFLOW (arg2));
1198c87b03e5Sespie
1199c87b03e5Sespie /* If we're doing a size calculation, unsigned arithmetic does overflow.
1200c87b03e5Sespie So check if force_fit_type truncated the value. */
1201c87b03e5Sespie if (is_sizetype
1202c87b03e5Sespie && ! TREE_OVERFLOW (t)
1203c87b03e5Sespie && (TREE_INT_CST_HIGH (t) != hi
1204c87b03e5Sespie || TREE_INT_CST_LOW (t) != low))
1205c87b03e5Sespie TREE_OVERFLOW (t) = 1;
1206c87b03e5Sespie
1207c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1208c87b03e5Sespie | TREE_CONSTANT_OVERFLOW (arg1)
1209c87b03e5Sespie | TREE_CONSTANT_OVERFLOW (arg2));
1210cd3be6e5Savsm
1211cd3be6e5Savsm if (sizeof_flag == 1)
1212cd3be6e5Savsm SIZEOF_PTR_DERIVED (t) = 1;
1213cd3be6e5Savsm
1214c87b03e5Sespie return t;
1215c87b03e5Sespie }
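/* Illustrative example (an addition, not original code): combining two
   INTEGER_CST nodes for 2 and 3 with PLUS_EXPR yields an INTEGER_CST for 5
   of the same type, with no overflow flags set.  */
#if 0
{
  tree two, three;	/* assumed to be INTEGER_CSTs of the same type */
  tree five = int_const_binop (PLUS_EXPR, two, three, 0);
}
#endif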
1216c87b03e5Sespie
1217c87b03e5Sespie /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1218c87b03e5Sespie constant. We assume ARG1 and ARG2 have the same data type, or at least
1219c87b03e5Sespie are the same kind of constant and the same machine mode.
1220c87b03e5Sespie
1221c87b03e5Sespie If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1222c87b03e5Sespie
1223c87b03e5Sespie static tree
1224c87b03e5Sespie const_binop (code, arg1, arg2, notrunc)
1225c87b03e5Sespie enum tree_code code;
1226c87b03e5Sespie tree arg1, arg2;
1227c87b03e5Sespie int notrunc;
1228c87b03e5Sespie {
1229c87b03e5Sespie STRIP_NOPS (arg1);
1230c87b03e5Sespie STRIP_NOPS (arg2);
1231c87b03e5Sespie
1232c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST)
1233c87b03e5Sespie return int_const_binop (code, arg1, arg2, notrunc);
1234c87b03e5Sespie
1235c87b03e5Sespie if (TREE_CODE (arg1) == REAL_CST)
1236c87b03e5Sespie {
1237c87b03e5Sespie REAL_VALUE_TYPE d1;
1238c87b03e5Sespie REAL_VALUE_TYPE d2;
1239c87b03e5Sespie REAL_VALUE_TYPE value;
1240c87b03e5Sespie tree t;
1241c87b03e5Sespie
1242c87b03e5Sespie d1 = TREE_REAL_CST (arg1);
1243c87b03e5Sespie d2 = TREE_REAL_CST (arg2);
1244c87b03e5Sespie
1245c87b03e5Sespie /* If either operand is a NaN, just return it. Otherwise, set up
1246c87b03e5Sespie for floating-point trap; we return an overflow. */
1247c87b03e5Sespie if (REAL_VALUE_ISNAN (d1))
1248c87b03e5Sespie return arg1;
1249c87b03e5Sespie else if (REAL_VALUE_ISNAN (d2))
1250c87b03e5Sespie return arg2;
1251c87b03e5Sespie
1252c87b03e5Sespie REAL_ARITHMETIC (value, code, d1, d2);
1253c87b03e5Sespie
1254c87b03e5Sespie t = build_real (TREE_TYPE (arg1),
1255c87b03e5Sespie real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1256c87b03e5Sespie value));
1257c87b03e5Sespie
1258c87b03e5Sespie TREE_OVERFLOW (t)
1259c87b03e5Sespie = (force_fit_type (t, 0)
1260c87b03e5Sespie | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1261c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
1262c87b03e5Sespie = TREE_OVERFLOW (t)
1263c87b03e5Sespie | TREE_CONSTANT_OVERFLOW (arg1)
1264c87b03e5Sespie | TREE_CONSTANT_OVERFLOW (arg2);
1265c87b03e5Sespie return t;
1266c87b03e5Sespie }
1267c87b03e5Sespie if (TREE_CODE (arg1) == COMPLEX_CST)
1268c87b03e5Sespie {
1269c87b03e5Sespie tree type = TREE_TYPE (arg1);
1270c87b03e5Sespie tree r1 = TREE_REALPART (arg1);
1271c87b03e5Sespie tree i1 = TREE_IMAGPART (arg1);
1272c87b03e5Sespie tree r2 = TREE_REALPART (arg2);
1273c87b03e5Sespie tree i2 = TREE_IMAGPART (arg2);
1274c87b03e5Sespie tree t;
1275c87b03e5Sespie
1276c87b03e5Sespie switch (code)
1277c87b03e5Sespie {
1278c87b03e5Sespie case PLUS_EXPR:
1279c87b03e5Sespie t = build_complex (type,
1280c87b03e5Sespie const_binop (PLUS_EXPR, r1, r2, notrunc),
1281c87b03e5Sespie const_binop (PLUS_EXPR, i1, i2, notrunc));
1282c87b03e5Sespie break;
1283c87b03e5Sespie
1284c87b03e5Sespie case MINUS_EXPR:
1285c87b03e5Sespie t = build_complex (type,
1286c87b03e5Sespie const_binop (MINUS_EXPR, r1, r2, notrunc),
1287c87b03e5Sespie const_binop (MINUS_EXPR, i1, i2, notrunc));
1288c87b03e5Sespie break;
1289c87b03e5Sespie
1290c87b03e5Sespie case MULT_EXPR:
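	  /* (r1 + i1*i) * (r2 + i2*i)
	       == (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i,
	     computed part by part below.  */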
1291c87b03e5Sespie t = build_complex (type,
1292c87b03e5Sespie const_binop (MINUS_EXPR,
1293c87b03e5Sespie const_binop (MULT_EXPR,
1294c87b03e5Sespie r1, r2, notrunc),
1295c87b03e5Sespie const_binop (MULT_EXPR,
1296c87b03e5Sespie i1, i2, notrunc),
1297c87b03e5Sespie notrunc),
1298c87b03e5Sespie const_binop (PLUS_EXPR,
1299c87b03e5Sespie const_binop (MULT_EXPR,
1300c87b03e5Sespie r1, i2, notrunc),
1301c87b03e5Sespie const_binop (MULT_EXPR,
1302c87b03e5Sespie i1, r2, notrunc),
1303c87b03e5Sespie notrunc));
1304c87b03e5Sespie break;
1305c87b03e5Sespie
1306c87b03e5Sespie case RDIV_EXPR:
1307c87b03e5Sespie {
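	    /* (r1 + i1*i) / (r2 + i2*i)
	         == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2);
	       the real and imaginary parts are computed below, using
	       TRUNC_DIV_EXPR instead of RDIV_EXPR for integral parts.  */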
1308c87b03e5Sespie tree magsquared
1309c87b03e5Sespie = const_binop (PLUS_EXPR,
1310c87b03e5Sespie const_binop (MULT_EXPR, r2, r2, notrunc),
1311c87b03e5Sespie const_binop (MULT_EXPR, i2, i2, notrunc),
1312c87b03e5Sespie notrunc);
1313c87b03e5Sespie
1314c87b03e5Sespie t = build_complex (type,
1315c87b03e5Sespie const_binop
1316c87b03e5Sespie (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1317c87b03e5Sespie ? TRUNC_DIV_EXPR : RDIV_EXPR,
1318c87b03e5Sespie const_binop (PLUS_EXPR,
1319c87b03e5Sespie const_binop (MULT_EXPR, r1, r2,
1320c87b03e5Sespie notrunc),
1321c87b03e5Sespie const_binop (MULT_EXPR, i1, i2,
1322c87b03e5Sespie notrunc),
1323c87b03e5Sespie notrunc),
1324c87b03e5Sespie magsquared, notrunc),
1325c87b03e5Sespie const_binop
1326c87b03e5Sespie (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1327c87b03e5Sespie ? TRUNC_DIV_EXPR : RDIV_EXPR,
1328c87b03e5Sespie const_binop (MINUS_EXPR,
1329c87b03e5Sespie const_binop (MULT_EXPR, i1, r2,
1330c87b03e5Sespie notrunc),
1331c87b03e5Sespie const_binop (MULT_EXPR, r1, i2,
1332c87b03e5Sespie notrunc),
1333c87b03e5Sespie notrunc),
1334c87b03e5Sespie magsquared, notrunc));
1335c87b03e5Sespie }
1336c87b03e5Sespie break;
1337c87b03e5Sespie
1338c87b03e5Sespie default:
1339c87b03e5Sespie abort ();
1340c87b03e5Sespie }
1341c87b03e5Sespie return t;
1342c87b03e5Sespie }
1343c87b03e5Sespie return 0;
1344c87b03e5Sespie }
1345c87b03e5Sespie
1346c87b03e5Sespie /* These are the hash table functions for the hash table of INTEGER_CST
1347c87b03e5Sespie nodes of a sizetype. */
1348c87b03e5Sespie
1349c87b03e5Sespie /* Return the hash code for X, an INTEGER_CST.  */
1350c87b03e5Sespie
1351c87b03e5Sespie static hashval_t
1352c87b03e5Sespie size_htab_hash (x)
1353c87b03e5Sespie const void *x;
1354c87b03e5Sespie {
1355c87b03e5Sespie tree t = (tree) x;
1356c87b03e5Sespie
1357c87b03e5Sespie return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1358c87b03e5Sespie ^ htab_hash_pointer (TREE_TYPE (t))
1359c87b03e5Sespie ^ (TREE_OVERFLOW (t) << 20));
1360c87b03e5Sespie }
1361c87b03e5Sespie
1362c87b03e5Sespie /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1363c87b03e5Sespie    is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1364c87b03e5Sespie
1365c87b03e5Sespie static int
1366c87b03e5Sespie size_htab_eq (x, y)
1367c87b03e5Sespie const void *x;
1368c87b03e5Sespie const void *y;
1369c87b03e5Sespie {
1370c87b03e5Sespie tree xt = (tree) x;
1371c87b03e5Sespie tree yt = (tree) y;
1372c87b03e5Sespie
1373c87b03e5Sespie return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1374c87b03e5Sespie && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1375c87b03e5Sespie && TREE_TYPE (xt) == TREE_TYPE (yt)
1376c87b03e5Sespie && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1377c87b03e5Sespie }
1378c87b03e5Sespie
1379c87b03e5Sespie /* Return an INTEGER_CST whose value has low-order HOST_BITS_PER_WIDE_INT
1380c87b03e5Sespie    bits given by NUMBER and whose type is the sizetype represented by KIND.  */
1381c87b03e5Sespie
1382c87b03e5Sespie tree
1383c87b03e5Sespie size_int_wide (number, kind)
1384c87b03e5Sespie HOST_WIDE_INT number;
1385c87b03e5Sespie enum size_type_kind kind;
1386c87b03e5Sespie {
1387c87b03e5Sespie return size_int_type_wide (number, sizetype_tab[(int) kind]);
1388c87b03e5Sespie }
1389c87b03e5Sespie
1390c87b03e5Sespie /* Likewise, but the desired type is specified explicitly. */
1391c87b03e5Sespie
1392c87b03e5Sespie static GTY (()) tree new_const;
1393c87b03e5Sespie static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1394c87b03e5Sespie htab_t size_htab;
1395c87b03e5Sespie
1396c87b03e5Sespie tree
1397c87b03e5Sespie size_int_type_wide (number, type)
1398c87b03e5Sespie HOST_WIDE_INT number;
1399c87b03e5Sespie tree type;
1400c87b03e5Sespie {
1401c87b03e5Sespie PTR *slot;
1402c87b03e5Sespie
1403c87b03e5Sespie if (size_htab == 0)
1404c87b03e5Sespie {
1405c87b03e5Sespie size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1406c87b03e5Sespie new_const = make_node (INTEGER_CST);
1407c87b03e5Sespie }
1408c87b03e5Sespie
1409c87b03e5Sespie /* Adjust NEW_CONST to be the constant we want. If it's already in the
1410c87b03e5Sespie hash table, we return the value from the hash table. Otherwise, we
1411c87b03e5Sespie place that in the hash table and make a new node for the next time. */
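  /* NEW_CONST thus acts as a single scratch node: it is handed over to the
     table only the first time a given (value, type, overflow) combination is
     seen, and a fresh scratch node is allocated in its place.  */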
1412c87b03e5Sespie TREE_INT_CST_LOW (new_const) = number;
1413c87b03e5Sespie TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1414c87b03e5Sespie TREE_TYPE (new_const) = type;
1415c87b03e5Sespie TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1416c87b03e5Sespie = force_fit_type (new_const, 0);
1417c87b03e5Sespie
1418c87b03e5Sespie slot = htab_find_slot (size_htab, new_const, INSERT);
1419c87b03e5Sespie if (*slot == 0)
1420c87b03e5Sespie {
1421c87b03e5Sespie tree t = new_const;
1422c87b03e5Sespie
1423c87b03e5Sespie *slot = (PTR) new_const;
1424c87b03e5Sespie new_const = make_node (INTEGER_CST);
1425c87b03e5Sespie return t;
1426c87b03e5Sespie }
1427c87b03e5Sespie else
1428c87b03e5Sespie return (tree) *slot;
1429c87b03e5Sespie }
1430c87b03e5Sespie
1431c87b03e5Sespie /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1432c87b03e5Sespie is a tree code. The type of the result is taken from the operands.
1433c87b03e5Sespie    Both must be the same integer type and it must be a size type.
1434c87b03e5Sespie If the operands are constant, so is the result. */
1435c87b03e5Sespie
1436c87b03e5Sespie tree
1437c87b03e5Sespie size_binop (code, arg0, arg1)
1438c87b03e5Sespie enum tree_code code;
1439c87b03e5Sespie tree arg0, arg1;
1440c87b03e5Sespie {
1441c87b03e5Sespie tree type = TREE_TYPE (arg0);
1442c87b03e5Sespie
1443c87b03e5Sespie if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1444c87b03e5Sespie || type != TREE_TYPE (arg1))
1445c87b03e5Sespie abort ();
1446c87b03e5Sespie
1447c87b03e5Sespie /* Handle the special case of two integer constants faster. */
1448c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1449c87b03e5Sespie {
1450c87b03e5Sespie /* And some specific cases even faster than that. */
1451c87b03e5Sespie if (code == PLUS_EXPR && integer_zerop (arg0))
1452c87b03e5Sespie return arg1;
1453c87b03e5Sespie else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1454c87b03e5Sespie && integer_zerop (arg1))
1455c87b03e5Sespie return arg0;
1456c87b03e5Sespie else if (code == MULT_EXPR && integer_onep (arg0))
1457c87b03e5Sespie return arg1;
1458c87b03e5Sespie
1459c87b03e5Sespie /* Handle general case of two integer constants. */
1460c87b03e5Sespie return int_const_binop (code, arg0, arg1, 0);
1461c87b03e5Sespie }
1462c87b03e5Sespie
1463c87b03e5Sespie if (arg0 == error_mark_node || arg1 == error_mark_node)
1464c87b03e5Sespie return error_mark_node;
1465c87b03e5Sespie
1466c87b03e5Sespie return fold (build (code, type, arg0, arg1));
1467c87b03e5Sespie }
1468c87b03e5Sespie
1469c87b03e5Sespie /* Given two values, either both of sizetype or both of bitsizetype,
1470c87b03e5Sespie compute the difference between the two values. Return the value
1471c87b03e5Sespie in signed type corresponding to the type of the operands. */
1472c87b03e5Sespie
1473c87b03e5Sespie tree
1474c87b03e5Sespie size_diffop (arg0, arg1)
1475c87b03e5Sespie tree arg0, arg1;
1476c87b03e5Sespie {
1477c87b03e5Sespie tree type = TREE_TYPE (arg0);
1478c87b03e5Sespie tree ctype;
1479c87b03e5Sespie
1480c87b03e5Sespie if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1481c87b03e5Sespie || type != TREE_TYPE (arg1))
1482c87b03e5Sespie abort ();
1483c87b03e5Sespie
1484c87b03e5Sespie /* If the type is already signed, just do the simple thing. */
1485c87b03e5Sespie if (! TREE_UNSIGNED (type))
1486c87b03e5Sespie return size_binop (MINUS_EXPR, arg0, arg1);
1487c87b03e5Sespie
1488c87b03e5Sespie ctype = (type == bitsizetype || type == ubitsizetype
1489c87b03e5Sespie ? sbitsizetype : ssizetype);
1490c87b03e5Sespie
1491c87b03e5Sespie /* If either operand is not a constant, do the conversions to the signed
1492c87b03e5Sespie type and subtract. The hardware will do the right thing with any
1493c87b03e5Sespie overflow in the subtraction. */
1494c87b03e5Sespie if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1495c87b03e5Sespie return size_binop (MINUS_EXPR, convert (ctype, arg0),
1496c87b03e5Sespie convert (ctype, arg1));
1497c87b03e5Sespie
1498c87b03e5Sespie /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1499c87b03e5Sespie Otherwise, subtract the other way, convert to CTYPE (we know that can't
1500c87b03e5Sespie overflow) and negate (which can't either). Special-case a result
1501c87b03e5Sespie of zero while we're here. */
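  /* Illustration: with sizetype ARG0 == 3 and ARG1 == 7 we return
     -(ssizetype) (7 - 3), i.e. -4, rather than the huge value the unsigned
     subtraction 3 - 7 would wrap around to.  */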
1502c87b03e5Sespie if (tree_int_cst_equal (arg0, arg1))
1503c87b03e5Sespie return convert (ctype, integer_zero_node);
1504c87b03e5Sespie else if (tree_int_cst_lt (arg1, arg0))
1505c87b03e5Sespie return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1506c87b03e5Sespie else
1507c87b03e5Sespie return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1508c87b03e5Sespie convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1509c87b03e5Sespie }
1510c87b03e5Sespie
1511c87b03e5Sespie
1512c87b03e5Sespie /* Given T, a tree representing type conversion of ARG1, a constant,
1513c87b03e5Sespie return a constant tree representing the result of conversion. */
1514c87b03e5Sespie
1515c87b03e5Sespie static tree
1516c87b03e5Sespie fold_convert (t, arg1)
1517c87b03e5Sespie tree t;
1518c87b03e5Sespie tree arg1;
1519c87b03e5Sespie {
1520c87b03e5Sespie tree type = TREE_TYPE (t);
1521c87b03e5Sespie int overflow = 0;
1522c87b03e5Sespie
1523c87b03e5Sespie if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1524c87b03e5Sespie {
1525c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST)
1526c87b03e5Sespie {
1527c87b03e5Sespie /* If we would build a constant wider than GCC supports,
1528c87b03e5Sespie leave the conversion unfolded. */
1529c87b03e5Sespie if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1530c87b03e5Sespie return t;
1531c87b03e5Sespie
1532c87b03e5Sespie /* If we are trying to make a sizetype for a small integer, use
1533c87b03e5Sespie size_int to pick up cached types to reduce duplicate nodes. */
1534c87b03e5Sespie if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1535c87b03e5Sespie && !TREE_CONSTANT_OVERFLOW (arg1)
1536c87b03e5Sespie && compare_tree_int (arg1, 10000) < 0)
1537c87b03e5Sespie return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1538c87b03e5Sespie
1539c87b03e5Sespie /* Given an integer constant, make new constant with new type,
1540c87b03e5Sespie appropriately sign-extended or truncated. */
1541c87b03e5Sespie t = build_int_2 (TREE_INT_CST_LOW (arg1),
1542c87b03e5Sespie TREE_INT_CST_HIGH (arg1));
1543c87b03e5Sespie TREE_TYPE (t) = type;
1544c87b03e5Sespie /* Indicate an overflow if (1) ARG1 already overflowed,
1545c87b03e5Sespie or (2) force_fit_type indicates an overflow.
1546c87b03e5Sespie Tell force_fit_type that an overflow has already occurred
1547c87b03e5Sespie if ARG1 is a too-large unsigned value and T is signed.
1548c87b03e5Sespie But don't indicate an overflow if converting a pointer. */
1549c87b03e5Sespie TREE_OVERFLOW (t)
1550c87b03e5Sespie = ((force_fit_type (t,
1551c87b03e5Sespie (TREE_INT_CST_HIGH (arg1) < 0
1552c87b03e5Sespie && (TREE_UNSIGNED (type)
1553c87b03e5Sespie < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1554c87b03e5Sespie && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1555c87b03e5Sespie || TREE_OVERFLOW (arg1));
1556c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
1557c87b03e5Sespie = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1558c87b03e5Sespie }
1559c87b03e5Sespie else if (TREE_CODE (arg1) == REAL_CST)
1560c87b03e5Sespie {
1561c87b03e5Sespie /* Don't initialize these, use assignments.
1562c87b03e5Sespie Initialized local aggregates don't work on old compilers. */
1563c87b03e5Sespie REAL_VALUE_TYPE x;
1564c87b03e5Sespie REAL_VALUE_TYPE l;
1565c87b03e5Sespie REAL_VALUE_TYPE u;
1566c87b03e5Sespie tree type1 = TREE_TYPE (arg1);
1567c87b03e5Sespie int no_upper_bound;
1568c87b03e5Sespie
1569c87b03e5Sespie x = TREE_REAL_CST (arg1);
1570c87b03e5Sespie l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1571c87b03e5Sespie
1572c87b03e5Sespie no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1573c87b03e5Sespie if (!no_upper_bound)
1574c87b03e5Sespie u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1575c87b03e5Sespie
1576c87b03e5Sespie /* See if X will be in range after truncation towards 0.
1577c87b03e5Sespie To compensate for truncation, move the bounds away from 0,
1578c87b03e5Sespie but reject if X exactly equals the adjusted bounds. */
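	  /* Illustration: for a signed 8-bit target type the adjusted bounds
	     are -129 and 128, so 127.9 is accepted and truncates to 127,
	     while 128.0 and -129.0 are rejected as overflows.  */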
1579c87b03e5Sespie REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1580c87b03e5Sespie if (!no_upper_bound)
1581c87b03e5Sespie REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1582c87b03e5Sespie /* If X is a NaN, use zero instead and show we have an overflow.
1583c87b03e5Sespie Otherwise, range check. */
1584c87b03e5Sespie if (REAL_VALUE_ISNAN (x))
1585c87b03e5Sespie overflow = 1, x = dconst0;
1586c87b03e5Sespie else if (! (REAL_VALUES_LESS (l, x)
1587c87b03e5Sespie && !no_upper_bound
1588c87b03e5Sespie && REAL_VALUES_LESS (x, u)))
1589c87b03e5Sespie overflow = 1;
1590c87b03e5Sespie
1591c87b03e5Sespie {
1592c87b03e5Sespie HOST_WIDE_INT low, high;
1593c87b03e5Sespie REAL_VALUE_TO_INT (&low, &high, x);
1594c87b03e5Sespie t = build_int_2 (low, high);
1595c87b03e5Sespie }
1596c87b03e5Sespie TREE_TYPE (t) = type;
1597c87b03e5Sespie TREE_OVERFLOW (t)
1598c87b03e5Sespie = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1599c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
1600c87b03e5Sespie = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1601c87b03e5Sespie }
1602c87b03e5Sespie TREE_TYPE (t) = type;
1603c87b03e5Sespie }
1604c87b03e5Sespie else if (TREE_CODE (type) == REAL_TYPE)
1605c87b03e5Sespie {
1606c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST)
1607c87b03e5Sespie return build_real_from_int_cst (type, arg1);
1608c87b03e5Sespie if (TREE_CODE (arg1) == REAL_CST)
1609c87b03e5Sespie {
1610c87b03e5Sespie if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1611c87b03e5Sespie {
1612c87b03e5Sespie /* We make a copy of ARG1 so that we don't modify an
1613c87b03e5Sespie existing constant tree. */
1614c87b03e5Sespie t = copy_node (arg1);
1615c87b03e5Sespie TREE_TYPE (t) = type;
1616c87b03e5Sespie return t;
1617c87b03e5Sespie }
1618c87b03e5Sespie
1619c87b03e5Sespie t = build_real (type,
1620c87b03e5Sespie real_value_truncate (TYPE_MODE (type),
1621c87b03e5Sespie TREE_REAL_CST (arg1)));
1622c87b03e5Sespie
1623c87b03e5Sespie TREE_OVERFLOW (t)
1624c87b03e5Sespie = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1625c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
1626c87b03e5Sespie = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1627c87b03e5Sespie return t;
1628c87b03e5Sespie }
1629c87b03e5Sespie }
1630c87b03e5Sespie TREE_CONSTANT (t) = 1;
1631c87b03e5Sespie return t;
1632c87b03e5Sespie }
1633c87b03e5Sespie
1634c87b03e5Sespie /* Return an expr equal to X but certainly not valid as an lvalue. */
1635c87b03e5Sespie
1636c87b03e5Sespie tree
1637c87b03e5Sespie non_lvalue (x)
1638c87b03e5Sespie tree x;
1639c87b03e5Sespie {
1640c87b03e5Sespie tree result;
1641c87b03e5Sespie
1642c87b03e5Sespie /* These things are certainly not lvalues. */
1643c87b03e5Sespie if (TREE_CODE (x) == NON_LVALUE_EXPR
1644c87b03e5Sespie || TREE_CODE (x) == INTEGER_CST
1645c87b03e5Sespie || TREE_CODE (x) == REAL_CST
1646c87b03e5Sespie || TREE_CODE (x) == STRING_CST
1647c87b03e5Sespie || TREE_CODE (x) == ADDR_EXPR)
1648c87b03e5Sespie return x;
1649c87b03e5Sespie
1650c87b03e5Sespie result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1651c87b03e5Sespie TREE_CONSTANT (result) = TREE_CONSTANT (x);
1652c87b03e5Sespie return result;
1653c87b03e5Sespie }
1654c87b03e5Sespie
1655c87b03e5Sespie /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1656c87b03e5Sespie Zero means allow extended lvalues. */
1657c87b03e5Sespie
1658c87b03e5Sespie int pedantic_lvalues;
1659c87b03e5Sespie
1660c87b03e5Sespie /* When pedantic, return an expr equal to X but certainly not valid as a
1661c87b03e5Sespie pedantic lvalue. Otherwise, return X. */
1662c87b03e5Sespie
1663c87b03e5Sespie tree
1664c87b03e5Sespie pedantic_non_lvalue (x)
1665c87b03e5Sespie tree x;
1666c87b03e5Sespie {
1667c87b03e5Sespie if (pedantic_lvalues)
1668c87b03e5Sespie return non_lvalue (x);
1669c87b03e5Sespie else
1670c87b03e5Sespie return x;
1671c87b03e5Sespie }
1672c87b03e5Sespie
1673c87b03e5Sespie /* Given a tree comparison code, return the code that is the logical inverse
1674c87b03e5Sespie    of the given code.  It is not safe to do this for floating-point
1675c87b03e5Sespie    comparisons, except for NE_EXPR and EQ_EXPR, since with an unordered operand (a NaN) e.g. ! (a < b) is not equivalent to a >= b.  */
1676c87b03e5Sespie
1677c87b03e5Sespie static enum tree_code
1678c87b03e5Sespie invert_tree_comparison (code)
1679c87b03e5Sespie enum tree_code code;
1680c87b03e5Sespie {
1681c87b03e5Sespie switch (code)
1682c87b03e5Sespie {
1683c87b03e5Sespie case EQ_EXPR:
1684c87b03e5Sespie return NE_EXPR;
1685c87b03e5Sespie case NE_EXPR:
1686c87b03e5Sespie return EQ_EXPR;
1687c87b03e5Sespie case GT_EXPR:
1688c87b03e5Sespie return LE_EXPR;
1689c87b03e5Sespie case GE_EXPR:
1690c87b03e5Sespie return LT_EXPR;
1691c87b03e5Sespie case LT_EXPR:
1692c87b03e5Sespie return GE_EXPR;
1693c87b03e5Sespie case LE_EXPR:
1694c87b03e5Sespie return GT_EXPR;
1695c87b03e5Sespie default:
1696c87b03e5Sespie abort ();
1697c87b03e5Sespie }
1698c87b03e5Sespie }
1699c87b03e5Sespie
1700c87b03e5Sespie /* Similar, but return the comparison that results if the operands are
1701c87b03e5Sespie swapped. This is safe for floating-point. */
1702c87b03e5Sespie
1703c87b03e5Sespie static enum tree_code
1704c87b03e5Sespie swap_tree_comparison (code)
1705c87b03e5Sespie enum tree_code code;
1706c87b03e5Sespie {
1707c87b03e5Sespie switch (code)
1708c87b03e5Sespie {
1709c87b03e5Sespie case EQ_EXPR:
1710c87b03e5Sespie case NE_EXPR:
1711c87b03e5Sespie return code;
1712c87b03e5Sespie case GT_EXPR:
1713c87b03e5Sespie return LT_EXPR;
1714c87b03e5Sespie case GE_EXPR:
1715c87b03e5Sespie return LE_EXPR;
1716c87b03e5Sespie case LT_EXPR:
1717c87b03e5Sespie return GT_EXPR;
1718c87b03e5Sespie case LE_EXPR:
1719c87b03e5Sespie return GE_EXPR;
1720c87b03e5Sespie default:
1721c87b03e5Sespie abort ();
1722c87b03e5Sespie }
1723c87b03e5Sespie }
1724c87b03e5Sespie
1725c87b03e5Sespie
1726c87b03e5Sespie /* Convert a comparison tree code from an enum tree_code representation
1727c87b03e5Sespie into a compcode bit-based encoding. This function is the inverse of
1728c87b03e5Sespie compcode_to_comparison. */
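/* (Note, assuming the COMPCODE_* values defined earlier in this file: the
   encoding dedicates one bit each to the <, ==, and > relations, so that,
   e.g., LE is the bitwise OR of LT and EQ, and combined predicates can be
   manipulated with ordinary bit operations.)  */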
1729c87b03e5Sespie
1730c87b03e5Sespie static int
1731c87b03e5Sespie comparison_to_compcode (code)
1732c87b03e5Sespie enum tree_code code;
1733c87b03e5Sespie {
1734c87b03e5Sespie switch (code)
1735c87b03e5Sespie {
1736c87b03e5Sespie case LT_EXPR:
1737c87b03e5Sespie return COMPCODE_LT;
1738c87b03e5Sespie case EQ_EXPR:
1739c87b03e5Sespie return COMPCODE_EQ;
1740c87b03e5Sespie case LE_EXPR:
1741c87b03e5Sespie return COMPCODE_LE;
1742c87b03e5Sespie case GT_EXPR:
1743c87b03e5Sespie return COMPCODE_GT;
1744c87b03e5Sespie case NE_EXPR:
1745c87b03e5Sespie return COMPCODE_NE;
1746c87b03e5Sespie case GE_EXPR:
1747c87b03e5Sespie return COMPCODE_GE;
1748c87b03e5Sespie default:
1749c87b03e5Sespie abort ();
1750c87b03e5Sespie }
1751c87b03e5Sespie }
1752c87b03e5Sespie
1753c87b03e5Sespie /* Convert a compcode bit-based encoding of a comparison operator back
1754c87b03e5Sespie to GCC's enum tree_code representation. This function is the
1755c87b03e5Sespie inverse of comparison_to_compcode. */
1756c87b03e5Sespie
1757c87b03e5Sespie static enum tree_code
1758c87b03e5Sespie compcode_to_comparison (code)
1759c87b03e5Sespie int code;
1760c87b03e5Sespie {
1761c87b03e5Sespie switch (code)
1762c87b03e5Sespie {
1763c87b03e5Sespie case COMPCODE_LT:
1764c87b03e5Sespie return LT_EXPR;
1765c87b03e5Sespie case COMPCODE_EQ:
1766c87b03e5Sespie return EQ_EXPR;
1767c87b03e5Sespie case COMPCODE_LE:
1768c87b03e5Sespie return LE_EXPR;
1769c87b03e5Sespie case COMPCODE_GT:
1770c87b03e5Sespie return GT_EXPR;
1771c87b03e5Sespie case COMPCODE_NE:
1772c87b03e5Sespie return NE_EXPR;
1773c87b03e5Sespie case COMPCODE_GE:
1774c87b03e5Sespie return GE_EXPR;
1775c87b03e5Sespie default:
1776c87b03e5Sespie abort ();
1777c87b03e5Sespie }
1778c87b03e5Sespie }
1779c87b03e5Sespie
1780c87b03e5Sespie /* Return nonzero if CODE is a tree code that represents a truth value. */
1781c87b03e5Sespie
1782c87b03e5Sespie static int
1783c87b03e5Sespie truth_value_p (code)
1784c87b03e5Sespie enum tree_code code;
1785c87b03e5Sespie {
1786c87b03e5Sespie return (TREE_CODE_CLASS (code) == '<'
1787c87b03e5Sespie || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1788c87b03e5Sespie || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1789c87b03e5Sespie || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1790c87b03e5Sespie }
1791c87b03e5Sespie
1792c87b03e5Sespie /* Return nonzero if two operands are necessarily equal.
1793c87b03e5Sespie If ONLY_CONST is nonzero, only return nonzero for constants.
1794c87b03e5Sespie This function tests whether the operands are indistinguishable;
1795c87b03e5Sespie it does not test whether they are equal using C's == operation.
1796c87b03e5Sespie The distinction is important for IEEE floating point, because
1797c87b03e5Sespie (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1798c87b03e5Sespie (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1799c87b03e5Sespie
1800c87b03e5Sespie int
1801c87b03e5Sespie operand_equal_p (arg0, arg1, only_const)
1802c87b03e5Sespie tree arg0, arg1;
1803c87b03e5Sespie int only_const;
1804c87b03e5Sespie {
1805c87b03e5Sespie /* If both types don't have the same signedness, then we can't consider
1806c87b03e5Sespie them equal. We must check this before the STRIP_NOPS calls
1807c87b03e5Sespie because they may change the signedness of the arguments. */
1808c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1809c87b03e5Sespie return 0;
1810c87b03e5Sespie
1811c87b03e5Sespie STRIP_NOPS (arg0);
1812c87b03e5Sespie STRIP_NOPS (arg1);
1813c87b03e5Sespie
1814c87b03e5Sespie if (TREE_CODE (arg0) != TREE_CODE (arg1)
1815c87b03e5Sespie /* This is needed for conversions and for COMPONENT_REF.
1816c87b03e5Sespie Might as well play it safe and always test this. */
1817c87b03e5Sespie || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1818c87b03e5Sespie || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1819c87b03e5Sespie || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1820c87b03e5Sespie return 0;
1821c87b03e5Sespie
1822c87b03e5Sespie /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1823c87b03e5Sespie We don't care about side effects in that case because the SAVE_EXPR
1824c87b03e5Sespie takes care of that for us. In all other cases, two expressions are
1825c87b03e5Sespie equal if they have no side effects. If we have two identical
1826c87b03e5Sespie expressions with side effects that should be treated the same due
1827c87b03e5Sespie to the only side effects being identical SAVE_EXPR's, that will
1828c87b03e5Sespie be detected in the recursive calls below. */
1829c87b03e5Sespie if (arg0 == arg1 && ! only_const
1830c87b03e5Sespie && (TREE_CODE (arg0) == SAVE_EXPR
1831c87b03e5Sespie || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1832c87b03e5Sespie return 1;
1833c87b03e5Sespie
1834c87b03e5Sespie /* Next handle constant cases, those for which we can return 1 even
1835c87b03e5Sespie if ONLY_CONST is set. */
1836c87b03e5Sespie if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1837c87b03e5Sespie switch (TREE_CODE (arg0))
1838c87b03e5Sespie {
1839c87b03e5Sespie case INTEGER_CST:
1840c87b03e5Sespie return (! TREE_CONSTANT_OVERFLOW (arg0)
1841c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg1)
1842c87b03e5Sespie && tree_int_cst_equal (arg0, arg1));
1843c87b03e5Sespie
1844c87b03e5Sespie case REAL_CST:
1845c87b03e5Sespie return (! TREE_CONSTANT_OVERFLOW (arg0)
1846c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg1)
1847c87b03e5Sespie && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1848c87b03e5Sespie TREE_REAL_CST (arg1)));
1849c87b03e5Sespie
1850c87b03e5Sespie case VECTOR_CST:
1851c87b03e5Sespie {
1852c87b03e5Sespie tree v1, v2;
1853c87b03e5Sespie
1854c87b03e5Sespie if (TREE_CONSTANT_OVERFLOW (arg0)
1855c87b03e5Sespie || TREE_CONSTANT_OVERFLOW (arg1))
1856c87b03e5Sespie return 0;
1857c87b03e5Sespie
1858c87b03e5Sespie v1 = TREE_VECTOR_CST_ELTS (arg0);
1859c87b03e5Sespie v2 = TREE_VECTOR_CST_ELTS (arg1);
1860c87b03e5Sespie while (v1 && v2)
1861c87b03e5Sespie {
1862c87b03e5Sespie if (!operand_equal_p (v1, v2, only_const))
1863c87b03e5Sespie return 0;
1864c87b03e5Sespie v1 = TREE_CHAIN (v1);
1865c87b03e5Sespie v2 = TREE_CHAIN (v2);
1866c87b03e5Sespie }
1867c87b03e5Sespie
1868c87b03e5Sespie return 1;
1869c87b03e5Sespie }
1870c87b03e5Sespie
1871c87b03e5Sespie case COMPLEX_CST:
1872c87b03e5Sespie return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1873c87b03e5Sespie only_const)
1874c87b03e5Sespie && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1875c87b03e5Sespie only_const));
1876c87b03e5Sespie
1877c87b03e5Sespie case STRING_CST:
1878c87b03e5Sespie return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1879c87b03e5Sespie && ! memcmp (TREE_STRING_POINTER (arg0),
1880c87b03e5Sespie TREE_STRING_POINTER (arg1),
1881c87b03e5Sespie TREE_STRING_LENGTH (arg0)));
1882c87b03e5Sespie
1883c87b03e5Sespie case ADDR_EXPR:
1884c87b03e5Sespie return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1885c87b03e5Sespie 0);
1886c87b03e5Sespie default:
1887c87b03e5Sespie break;
1888c87b03e5Sespie }
1889c87b03e5Sespie
1890c87b03e5Sespie if (only_const)
1891c87b03e5Sespie return 0;
1892c87b03e5Sespie
1893c87b03e5Sespie switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1894c87b03e5Sespie {
1895c87b03e5Sespie case '1':
1896c87b03e5Sespie /* Two conversions are equal only if signedness and modes match. */
1897c87b03e5Sespie if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1898c87b03e5Sespie && (TREE_UNSIGNED (TREE_TYPE (arg0))
1899c87b03e5Sespie != TREE_UNSIGNED (TREE_TYPE (arg1))))
1900c87b03e5Sespie return 0;
1901c87b03e5Sespie
1902c87b03e5Sespie return operand_equal_p (TREE_OPERAND (arg0, 0),
1903c87b03e5Sespie TREE_OPERAND (arg1, 0), 0);
1904c87b03e5Sespie
1905c87b03e5Sespie case '<':
1906c87b03e5Sespie case '2':
1907c87b03e5Sespie if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1908c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1909c87b03e5Sespie 0))
1910c87b03e5Sespie return 1;
1911c87b03e5Sespie
1912c87b03e5Sespie /* For commutative ops, allow the other order. */
1913c87b03e5Sespie return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1914c87b03e5Sespie || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1915c87b03e5Sespie || TREE_CODE (arg0) == BIT_IOR_EXPR
1916c87b03e5Sespie || TREE_CODE (arg0) == BIT_XOR_EXPR
1917c87b03e5Sespie || TREE_CODE (arg0) == BIT_AND_EXPR
1918c87b03e5Sespie || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1919c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 0),
1920c87b03e5Sespie TREE_OPERAND (arg1, 1), 0)
1921c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
1922c87b03e5Sespie TREE_OPERAND (arg1, 0), 0));
1923c87b03e5Sespie
1924c87b03e5Sespie case 'r':
1925c87b03e5Sespie /* If either of the pointer (or reference) expressions we are dereferencing
1926c87b03e5Sespie 	 contains a side effect, they cannot be equal.  */
1927c87b03e5Sespie if (TREE_SIDE_EFFECTS (arg0)
1928c87b03e5Sespie || TREE_SIDE_EFFECTS (arg1))
1929c87b03e5Sespie return 0;
1930c87b03e5Sespie
1931c87b03e5Sespie switch (TREE_CODE (arg0))
1932c87b03e5Sespie {
1933c87b03e5Sespie case INDIRECT_REF:
1934c87b03e5Sespie return operand_equal_p (TREE_OPERAND (arg0, 0),
1935c87b03e5Sespie TREE_OPERAND (arg1, 0), 0);
1936c87b03e5Sespie
1937c87b03e5Sespie case COMPONENT_REF:
1938c87b03e5Sespie case ARRAY_REF:
1939c87b03e5Sespie case ARRAY_RANGE_REF:
1940c87b03e5Sespie return (operand_equal_p (TREE_OPERAND (arg0, 0),
1941c87b03e5Sespie TREE_OPERAND (arg1, 0), 0)
1942c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
1943c87b03e5Sespie TREE_OPERAND (arg1, 1), 0));
1944c87b03e5Sespie
1945c87b03e5Sespie case BIT_FIELD_REF:
1946c87b03e5Sespie return (operand_equal_p (TREE_OPERAND (arg0, 0),
1947c87b03e5Sespie TREE_OPERAND (arg1, 0), 0)
1948c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
1949c87b03e5Sespie TREE_OPERAND (arg1, 1), 0)
1950c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 2),
1951c87b03e5Sespie TREE_OPERAND (arg1, 2), 0));
1952c87b03e5Sespie default:
1953c87b03e5Sespie return 0;
1954c87b03e5Sespie }
1955c87b03e5Sespie
1956c87b03e5Sespie case 'e':
1957c87b03e5Sespie if (TREE_CODE (arg0) == RTL_EXPR)
1958c87b03e5Sespie return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1959c87b03e5Sespie return 0;
1960c87b03e5Sespie
1961c87b03e5Sespie default:
1962c87b03e5Sespie return 0;
1963c87b03e5Sespie }
1964c87b03e5Sespie }
1965c87b03e5Sespie
1966c87b03e5Sespie /* Similar to operand_equal_p, but see if ARG0 might have been made by
1967c87b03e5Sespie shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1968c87b03e5Sespie
1969c87b03e5Sespie When in doubt, return 0. */
1970c87b03e5Sespie
1971c87b03e5Sespie static int
1972c87b03e5Sespie operand_equal_for_comparison_p (arg0, arg1, other)
1973c87b03e5Sespie tree arg0, arg1;
1974c87b03e5Sespie tree other;
1975c87b03e5Sespie {
1976c87b03e5Sespie int unsignedp1, unsignedpo;
1977c87b03e5Sespie tree primarg0, primarg1, primother;
1978c87b03e5Sespie unsigned int correct_width;
1979c87b03e5Sespie
1980c87b03e5Sespie if (operand_equal_p (arg0, arg1, 0))
1981c87b03e5Sespie return 1;
1982c87b03e5Sespie
1983c87b03e5Sespie if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1984c87b03e5Sespie || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1985c87b03e5Sespie return 0;
1986c87b03e5Sespie
1987c87b03e5Sespie /* Discard any conversions that don't change the modes of ARG0 and ARG1
1988c87b03e5Sespie and see if the inner values are the same. This removes any
1989c87b03e5Sespie signedness comparison, which doesn't matter here. */
1990c87b03e5Sespie primarg0 = arg0, primarg1 = arg1;
1991c87b03e5Sespie STRIP_NOPS (primarg0);
1992c87b03e5Sespie STRIP_NOPS (primarg1);
1993c87b03e5Sespie if (operand_equal_p (primarg0, primarg1, 0))
1994c87b03e5Sespie return 1;
1995c87b03e5Sespie
1996c87b03e5Sespie /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1997c87b03e5Sespie actual comparison operand, ARG0.
1998c87b03e5Sespie
1999c87b03e5Sespie First throw away any conversions to wider types
2000c87b03e5Sespie already present in the operands. */
2001c87b03e5Sespie
2002c87b03e5Sespie primarg1 = get_narrower (arg1, &unsignedp1);
2003c87b03e5Sespie primother = get_narrower (other, &unsignedpo);
2004c87b03e5Sespie
2005c87b03e5Sespie correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2006c87b03e5Sespie if (unsignedp1 == unsignedpo
2007c87b03e5Sespie && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2008c87b03e5Sespie && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2009c87b03e5Sespie {
2010c87b03e5Sespie tree type = TREE_TYPE (arg0);
2011c87b03e5Sespie
2012c87b03e5Sespie /* Make sure shorter operand is extended the right way
2013c87b03e5Sespie to match the longer operand. */
2014c87b03e5Sespie primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2015c87b03e5Sespie (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2016c87b03e5Sespie
2017c87b03e5Sespie if (operand_equal_p (arg0, convert (type, primarg1), 0))
2018c87b03e5Sespie return 1;
2019c87b03e5Sespie }
2020c87b03e5Sespie
2021c87b03e5Sespie return 0;
2022c87b03e5Sespie }
2023c87b03e5Sespie
2024c87b03e5Sespie /* See if ARG is an expression that is either a comparison or is performing
2025c87b03e5Sespie arithmetic on comparisons. The comparisons must only be comparing
2026c87b03e5Sespie two different values, which will be stored in *CVAL1 and *CVAL2; if
2027c87b03e5Sespie they are nonzero it means that some operands have already been found.
2028c87b03e5Sespie No variables may be used anywhere else in the expression except in the
2029c87b03e5Sespie comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2030c87b03e5Sespie the expression and save_expr needs to be called with CVAL1 and CVAL2.
2031c87b03e5Sespie
2032c87b03e5Sespie If this is true, return 1. Otherwise, return zero. */
2033c87b03e5Sespie
2034c87b03e5Sespie static int
2035c87b03e5Sespie twoval_comparison_p (arg, cval1, cval2, save_p)
2036c87b03e5Sespie tree arg;
2037c87b03e5Sespie tree *cval1, *cval2;
2038c87b03e5Sespie int *save_p;
2039c87b03e5Sespie {
2040c87b03e5Sespie enum tree_code code = TREE_CODE (arg);
2041c87b03e5Sespie char class = TREE_CODE_CLASS (code);
2042c87b03e5Sespie
2043c87b03e5Sespie /* We can handle some of the 'e' cases here. */
2044c87b03e5Sespie if (class == 'e' && code == TRUTH_NOT_EXPR)
2045c87b03e5Sespie class = '1';
2046c87b03e5Sespie else if (class == 'e'
2047c87b03e5Sespie && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2048c87b03e5Sespie || code == COMPOUND_EXPR))
2049c87b03e5Sespie class = '2';
2050c87b03e5Sespie
2051c87b03e5Sespie else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2052c87b03e5Sespie && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2053c87b03e5Sespie {
2054c87b03e5Sespie /* If we've already found a CVAL1 or CVAL2, this expression is
2055c87b03e5Sespie 	 too complex to handle.  */
2056c87b03e5Sespie if (*cval1 || *cval2)
2057c87b03e5Sespie return 0;
2058c87b03e5Sespie
2059c87b03e5Sespie class = '1';
2060c87b03e5Sespie *save_p = 1;
2061c87b03e5Sespie }
2062c87b03e5Sespie
2063c87b03e5Sespie switch (class)
2064c87b03e5Sespie {
2065c87b03e5Sespie case '1':
2066c87b03e5Sespie return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2067c87b03e5Sespie
2068c87b03e5Sespie case '2':
2069c87b03e5Sespie return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2070c87b03e5Sespie && twoval_comparison_p (TREE_OPERAND (arg, 1),
2071c87b03e5Sespie cval1, cval2, save_p));
2072c87b03e5Sespie
2073c87b03e5Sespie case 'c':
2074c87b03e5Sespie return 1;
2075c87b03e5Sespie
2076c87b03e5Sespie case 'e':
2077c87b03e5Sespie if (code == COND_EXPR)
2078c87b03e5Sespie return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2079c87b03e5Sespie cval1, cval2, save_p)
2080c87b03e5Sespie && twoval_comparison_p (TREE_OPERAND (arg, 1),
2081c87b03e5Sespie cval1, cval2, save_p)
2082c87b03e5Sespie && twoval_comparison_p (TREE_OPERAND (arg, 2),
2083c87b03e5Sespie cval1, cval2, save_p));
2084c87b03e5Sespie return 0;
2085c87b03e5Sespie
2086c87b03e5Sespie case '<':
2087c87b03e5Sespie /* First see if we can handle the first operand, then the second. For
2088c87b03e5Sespie the second operand, we know *CVAL1 can't be zero. It must be that
2089c87b03e5Sespie one side of the comparison is each of the values; test for the
2090c87b03e5Sespie case where this isn't true by failing if the two operands
2091c87b03e5Sespie are the same. */
2092c87b03e5Sespie
2093c87b03e5Sespie if (operand_equal_p (TREE_OPERAND (arg, 0),
2094c87b03e5Sespie TREE_OPERAND (arg, 1), 0))
2095c87b03e5Sespie return 0;
2096c87b03e5Sespie
2097c87b03e5Sespie if (*cval1 == 0)
2098c87b03e5Sespie *cval1 = TREE_OPERAND (arg, 0);
2099c87b03e5Sespie else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2100c87b03e5Sespie ;
2101c87b03e5Sespie else if (*cval2 == 0)
2102c87b03e5Sespie *cval2 = TREE_OPERAND (arg, 0);
2103c87b03e5Sespie else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2104c87b03e5Sespie ;
2105c87b03e5Sespie else
2106c87b03e5Sespie return 0;
2107c87b03e5Sespie
2108c87b03e5Sespie if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2109c87b03e5Sespie ;
2110c87b03e5Sespie else if (*cval2 == 0)
2111c87b03e5Sespie *cval2 = TREE_OPERAND (arg, 1);
2112c87b03e5Sespie else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2113c87b03e5Sespie ;
2114c87b03e5Sespie else
2115c87b03e5Sespie return 0;
2116c87b03e5Sespie
2117c87b03e5Sespie return 1;
2118c87b03e5Sespie
2119c87b03e5Sespie default:
2120c87b03e5Sespie return 0;
2121c87b03e5Sespie }
2122c87b03e5Sespie }
2123c87b03e5Sespie
2124c87b03e5Sespie /* ARG is a tree that is known to contain just arithmetic operations and
2125c87b03e5Sespie comparisons. Evaluate the operations in the tree substituting NEW0 for
2126c87b03e5Sespie any occurrence of OLD0 as an operand of a comparison and likewise for
2127c87b03e5Sespie NEW1 and OLD1. */
2128c87b03e5Sespie
2129c87b03e5Sespie static tree
2130c87b03e5Sespie eval_subst (arg, old0, new0, old1, new1)
2131c87b03e5Sespie tree arg;
2132c87b03e5Sespie tree old0, new0, old1, new1;
2133c87b03e5Sespie {
2134c87b03e5Sespie tree type = TREE_TYPE (arg);
2135c87b03e5Sespie enum tree_code code = TREE_CODE (arg);
2136c87b03e5Sespie char class = TREE_CODE_CLASS (code);
2137c87b03e5Sespie
2138c87b03e5Sespie /* We can handle some of the 'e' cases here. */
2139c87b03e5Sespie if (class == 'e' && code == TRUTH_NOT_EXPR)
2140c87b03e5Sespie class = '1';
2141c87b03e5Sespie else if (class == 'e'
2142c87b03e5Sespie && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2143c87b03e5Sespie class = '2';
2144c87b03e5Sespie
2145c87b03e5Sespie switch (class)
2146c87b03e5Sespie {
2147c87b03e5Sespie case '1':
2148c87b03e5Sespie return fold (build1 (code, type,
2149c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 0),
2150c87b03e5Sespie old0, new0, old1, new1)));
2151c87b03e5Sespie
2152c87b03e5Sespie case '2':
2153c87b03e5Sespie return fold (build (code, type,
2154c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 0),
2155c87b03e5Sespie old0, new0, old1, new1),
2156c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 1),
2157c87b03e5Sespie old0, new0, old1, new1)));
2158c87b03e5Sespie
2159c87b03e5Sespie case 'e':
2160c87b03e5Sespie switch (code)
2161c87b03e5Sespie {
2162c87b03e5Sespie case SAVE_EXPR:
2163c87b03e5Sespie return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2164c87b03e5Sespie
2165c87b03e5Sespie case COMPOUND_EXPR:
2166c87b03e5Sespie return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2167c87b03e5Sespie
2168c87b03e5Sespie case COND_EXPR:
2169c87b03e5Sespie return fold (build (code, type,
2170c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 0),
2171c87b03e5Sespie old0, new0, old1, new1),
2172c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 1),
2173c87b03e5Sespie old0, new0, old1, new1),
2174c87b03e5Sespie eval_subst (TREE_OPERAND (arg, 2),
2175c87b03e5Sespie old0, new0, old1, new1)));
2176c87b03e5Sespie default:
2177c87b03e5Sespie break;
2178c87b03e5Sespie }
2179c87b03e5Sespie /* fall through - ??? */
2180c87b03e5Sespie
2181c87b03e5Sespie case '<':
2182c87b03e5Sespie {
2183c87b03e5Sespie tree arg0 = TREE_OPERAND (arg, 0);
2184c87b03e5Sespie tree arg1 = TREE_OPERAND (arg, 1);
2185c87b03e5Sespie
2186c87b03e5Sespie /* We need to check both for exact equality and tree equality. The
2187c87b03e5Sespie former will be true if the operand has a side-effect. In that
2188c87b03e5Sespie case, we know the operand occurred exactly once. */
2189c87b03e5Sespie
2190c87b03e5Sespie if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2191c87b03e5Sespie arg0 = new0;
2192c87b03e5Sespie else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2193c87b03e5Sespie arg0 = new1;
2194c87b03e5Sespie
2195c87b03e5Sespie if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2196c87b03e5Sespie arg1 = new0;
2197c87b03e5Sespie else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2198c87b03e5Sespie arg1 = new1;
2199c87b03e5Sespie
2200c87b03e5Sespie return fold (build (code, type, arg0, arg1));
2201c87b03e5Sespie }
2202c87b03e5Sespie
2203c87b03e5Sespie default:
2204c87b03e5Sespie return arg;
2205c87b03e5Sespie }
2206c87b03e5Sespie }
2207c87b03e5Sespie
2208c87b03e5Sespie /* Return a tree for the case when the result of an expression is RESULT
2209c87b03e5Sespie converted to TYPE and OMITTED was previously an operand of the expression
2210c87b03e5Sespie but is now not needed (e.g., we folded OMITTED * 0).
2211c87b03e5Sespie
2212c87b03e5Sespie If OMITTED has side effects, we must evaluate it. Otherwise, just do
2213c87b03e5Sespie the conversion of RESULT to TYPE. */
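/* For instance, omitting a side-effecting call when folding f () * 0 yields
   the tree (f (), 0), so the call is still evaluated; without side effects
   the result is simply a non-lvalue 0.  */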
2214c87b03e5Sespie
2215c87b03e5Sespie static tree
2216c87b03e5Sespie omit_one_operand (type, result, omitted)
2217c87b03e5Sespie tree type, result, omitted;
2218c87b03e5Sespie {
2219c87b03e5Sespie tree t = convert (type, result);
2220c87b03e5Sespie
2221c87b03e5Sespie if (TREE_SIDE_EFFECTS (omitted))
2222c87b03e5Sespie return build (COMPOUND_EXPR, type, omitted, t);
2223c87b03e5Sespie
2224c87b03e5Sespie return non_lvalue (t);
2225c87b03e5Sespie }
2226c87b03e5Sespie
2227c87b03e5Sespie /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2228c87b03e5Sespie
2229c87b03e5Sespie static tree
2230c87b03e5Sespie pedantic_omit_one_operand (type, result, omitted)
2231c87b03e5Sespie tree type, result, omitted;
2232c87b03e5Sespie {
2233c87b03e5Sespie tree t = convert (type, result);
2234c87b03e5Sespie
2235c87b03e5Sespie if (TREE_SIDE_EFFECTS (omitted))
2236c87b03e5Sespie return build (COMPOUND_EXPR, type, omitted, t);
2237c87b03e5Sespie
2238c87b03e5Sespie return pedantic_non_lvalue (t);
2239c87b03e5Sespie }
2240c87b03e5Sespie
2241c87b03e5Sespie /* Return a simplified tree node for the truth-negation of ARG. This
2242c87b03e5Sespie never alters ARG itself. We assume that ARG is an operation that
2243c87b03e5Sespie returns a truth value (0 or 1). */
2244c87b03e5Sespie
2245c87b03e5Sespie tree
2246c87b03e5Sespie invert_truthvalue (arg)
2247c87b03e5Sespie tree arg;
2248c87b03e5Sespie {
2249c87b03e5Sespie tree type = TREE_TYPE (arg);
2250c87b03e5Sespie enum tree_code code = TREE_CODE (arg);
2251c87b03e5Sespie
2252c87b03e5Sespie if (code == ERROR_MARK)
2253c87b03e5Sespie return arg;
2254c87b03e5Sespie
2255c87b03e5Sespie /* If this is a comparison, we can simply invert it, except for
2256c87b03e5Sespie floating-point non-equality comparisons, in which case we just
2257c87b03e5Sespie enclose a TRUTH_NOT_EXPR around what we have. */
2258c87b03e5Sespie
2259c87b03e5Sespie if (TREE_CODE_CLASS (code) == '<')
2260c87b03e5Sespie {
2261c87b03e5Sespie if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2262c87b03e5Sespie && !flag_unsafe_math_optimizations
2263c87b03e5Sespie && code != NE_EXPR
2264c87b03e5Sespie && code != EQ_EXPR)
2265c87b03e5Sespie return build1 (TRUTH_NOT_EXPR, type, arg);
2266c87b03e5Sespie else
2267c87b03e5Sespie return build (invert_tree_comparison (code), type,
2268c87b03e5Sespie TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2269c87b03e5Sespie }
2270c87b03e5Sespie
2271c87b03e5Sespie switch (code)
2272c87b03e5Sespie {
2273c87b03e5Sespie case INTEGER_CST:
2274c87b03e5Sespie return convert (type, build_int_2 (integer_zerop (arg), 0));
2275c87b03e5Sespie
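    /* De Morgan's laws: ! (a && b) == (! a || ! b) and ! (a || b) == (! a && ! b);
       they are applied below to both the plain TRUTH_AND_EXPR/TRUTH_OR_EXPR
       forms and the short-circuit TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR forms.  */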
2276c87b03e5Sespie case TRUTH_AND_EXPR:
2277c87b03e5Sespie return build (TRUTH_OR_EXPR, type,
2278c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2279c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)));
2280c87b03e5Sespie
2281c87b03e5Sespie case TRUTH_OR_EXPR:
2282c87b03e5Sespie return build (TRUTH_AND_EXPR, type,
2283c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2284c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)));
2285c87b03e5Sespie
2286c87b03e5Sespie case TRUTH_XOR_EXPR:
2287c87b03e5Sespie /* Here we can invert either operand. We invert the first operand
2288c87b03e5Sespie unless the second operand is a TRUTH_NOT_EXPR in which case our
2289c87b03e5Sespie result is the XOR of the first operand with the inside of the
2290c87b03e5Sespie negation of the second operand. */
2291c87b03e5Sespie
2292c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2293c87b03e5Sespie return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2294c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2295c87b03e5Sespie else
2296c87b03e5Sespie return build (TRUTH_XOR_EXPR, type,
2297c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2298c87b03e5Sespie TREE_OPERAND (arg, 1));
2299c87b03e5Sespie
2300c87b03e5Sespie case TRUTH_ANDIF_EXPR:
2301c87b03e5Sespie return build (TRUTH_ORIF_EXPR, type,
2302c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2303c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)));
2304c87b03e5Sespie
2305c87b03e5Sespie case TRUTH_ORIF_EXPR:
2306c87b03e5Sespie return build (TRUTH_ANDIF_EXPR, type,
2307c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2308c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)));
2309c87b03e5Sespie
2310c87b03e5Sespie case TRUTH_NOT_EXPR:
2311c87b03e5Sespie return TREE_OPERAND (arg, 0);
2312c87b03e5Sespie
2313c87b03e5Sespie case COND_EXPR:
2314c87b03e5Sespie return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2315c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)),
2316c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 2)));
2317c87b03e5Sespie
2318c87b03e5Sespie case COMPOUND_EXPR:
2319c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2320c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 1)));
2321c87b03e5Sespie
2322c87b03e5Sespie case WITH_RECORD_EXPR:
2323c87b03e5Sespie return build (WITH_RECORD_EXPR, type,
2324c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)),
2325c87b03e5Sespie TREE_OPERAND (arg, 1));
2326c87b03e5Sespie
2327c87b03e5Sespie case NON_LVALUE_EXPR:
2328c87b03e5Sespie return invert_truthvalue (TREE_OPERAND (arg, 0));
2329c87b03e5Sespie
2330c87b03e5Sespie case NOP_EXPR:
2331c87b03e5Sespie case CONVERT_EXPR:
2332c87b03e5Sespie case FLOAT_EXPR:
2333c87b03e5Sespie return build1 (TREE_CODE (arg), type,
2334c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)));
2335c87b03e5Sespie
2336c87b03e5Sespie case BIT_AND_EXPR:
2337c87b03e5Sespie if (!integer_onep (TREE_OPERAND (arg, 1)))
2338c87b03e5Sespie break;
2339c87b03e5Sespie return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2340c87b03e5Sespie
2341c87b03e5Sespie case SAVE_EXPR:
2342c87b03e5Sespie return build1 (TRUTH_NOT_EXPR, type, arg);
2343c87b03e5Sespie
2344c87b03e5Sespie case CLEANUP_POINT_EXPR:
2345c87b03e5Sespie return build1 (CLEANUP_POINT_EXPR, type,
2346c87b03e5Sespie invert_truthvalue (TREE_OPERAND (arg, 0)));
2347c87b03e5Sespie
2348c87b03e5Sespie default:
2349c87b03e5Sespie break;
2350c87b03e5Sespie }
2351c87b03e5Sespie if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2352c87b03e5Sespie abort ();
2353c87b03e5Sespie return build1 (TRUTH_NOT_EXPR, type, arg);
2354c87b03e5Sespie }
2355c87b03e5Sespie
2356c87b03e5Sespie /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2357c87b03e5Sespie operands are another bit-wise operation with a common input. If so,
2358c87b03e5Sespie distribute the bit operations to save an operation and possibly two if
2359c87b03e5Sespie constants are involved. For example, convert
2360c87b03e5Sespie (A | B) & (A | C) into A | (B & C)
2361c87b03e5Sespie Further simplification will occur if B and C are constants.
2362c87b03e5Sespie
2363c87b03e5Sespie If this optimization cannot be done, 0 will be returned. */
2364c87b03e5Sespie
2365c87b03e5Sespie static tree
2366c87b03e5Sespie distribute_bit_expr (code, type, arg0, arg1)
2367c87b03e5Sespie enum tree_code code;
2368c87b03e5Sespie tree type;
2369c87b03e5Sespie tree arg0, arg1;
2370c87b03e5Sespie {
2371c87b03e5Sespie tree common;
2372c87b03e5Sespie tree left, right;
2373c87b03e5Sespie
2374c87b03e5Sespie if (TREE_CODE (arg0) != TREE_CODE (arg1)
2375c87b03e5Sespie || TREE_CODE (arg0) == code
2376c87b03e5Sespie || (TREE_CODE (arg0) != BIT_AND_EXPR
2377c87b03e5Sespie && TREE_CODE (arg0) != BIT_IOR_EXPR))
2378c87b03e5Sespie return 0;
2379c87b03e5Sespie
2380c87b03e5Sespie if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2381c87b03e5Sespie {
2382c87b03e5Sespie common = TREE_OPERAND (arg0, 0);
2383c87b03e5Sespie left = TREE_OPERAND (arg0, 1);
2384c87b03e5Sespie right = TREE_OPERAND (arg1, 1);
2385c87b03e5Sespie }
2386c87b03e5Sespie else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2387c87b03e5Sespie {
2388c87b03e5Sespie common = TREE_OPERAND (arg0, 0);
2389c87b03e5Sespie left = TREE_OPERAND (arg0, 1);
2390c87b03e5Sespie right = TREE_OPERAND (arg1, 0);
2391c87b03e5Sespie }
2392c87b03e5Sespie else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2393c87b03e5Sespie {
2394c87b03e5Sespie common = TREE_OPERAND (arg0, 1);
2395c87b03e5Sespie left = TREE_OPERAND (arg0, 0);
2396c87b03e5Sespie right = TREE_OPERAND (arg1, 1);
2397c87b03e5Sespie }
2398c87b03e5Sespie else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2399c87b03e5Sespie {
2400c87b03e5Sespie common = TREE_OPERAND (arg0, 1);
2401c87b03e5Sespie left = TREE_OPERAND (arg0, 0);
2402c87b03e5Sespie right = TREE_OPERAND (arg1, 0);
2403c87b03e5Sespie }
2404c87b03e5Sespie else
2405c87b03e5Sespie return 0;
2406c87b03e5Sespie
2407c87b03e5Sespie return fold (build (TREE_CODE (arg0), type, common,
2408c87b03e5Sespie fold (build (code, type, left, right))));
2409c87b03e5Sespie }
2410c87b03e5Sespie
2411c87b03e5Sespie /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2412c87b03e5Sespie starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2413c87b03e5Sespie
2414c87b03e5Sespie static tree
2415c87b03e5Sespie make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2416c87b03e5Sespie tree inner;
2417c87b03e5Sespie tree type;
2418c87b03e5Sespie int bitsize, bitpos;
2419c87b03e5Sespie int unsignedp;
2420c87b03e5Sespie {
2421c87b03e5Sespie tree result = build (BIT_FIELD_REF, type, inner,
2422c87b03e5Sespie size_int (bitsize), bitsize_int (bitpos));
2423c87b03e5Sespie
2424c87b03e5Sespie TREE_UNSIGNED (result) = unsignedp;
2425c87b03e5Sespie
2426c87b03e5Sespie return result;
2427c87b03e5Sespie }
2428c87b03e5Sespie
2429c87b03e5Sespie /* Optimize a bit-field compare.
2430c87b03e5Sespie
2431c87b03e5Sespie There are two cases: First is a compare against a constant and the
2432c87b03e5Sespie second is a comparison of two items where the fields are at the same
2433c87b03e5Sespie bit position relative to the start of a chunk (byte, halfword, word)
2434c87b03e5Sespie large enough to contain it. In these cases we can avoid the shift
2435c87b03e5Sespie implicit in bitfield extractions.
2436c87b03e5Sespie
2437c87b03e5Sespie For constants, we emit a compare of the shifted constant with the
2438c87b03e5Sespie BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2439c87b03e5Sespie compared. For two fields at the same position, we do the ANDs with the
2440c87b03e5Sespie similar mask and compare the result of the ANDs.
2441c87b03e5Sespie
2442c87b03e5Sespie CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2443c87b03e5Sespie COMPARE_TYPE is the type of the comparison, and LHS and RHS
2444c87b03e5Sespie are the left and right operands of the comparison, respectively.
2445c87b03e5Sespie
2446c87b03e5Sespie If the optimization described above can be done, we return the resulting
2447c87b03e5Sespie tree. Otherwise we return zero. */
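/* Illustrative sketch (ignoring endianness and mode-selection details): for
   a 3-bit field at bit offset 2 within a byte-sized chunk, the test
   "field == 5" can be rewritten as "(chunk & 0x1c) == (5 << 2)", which
   avoids the shift a plain bit-field extraction would require.  */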
2448c87b03e5Sespie
2449c87b03e5Sespie static tree
2450c87b03e5Sespie optimize_bit_field_compare (code, compare_type, lhs, rhs)
2451c87b03e5Sespie enum tree_code code;
2452c87b03e5Sespie tree compare_type;
2453c87b03e5Sespie tree lhs, rhs;
2454c87b03e5Sespie {
2455c87b03e5Sespie HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2456c87b03e5Sespie tree type = TREE_TYPE (lhs);
2457c87b03e5Sespie tree signed_type, unsigned_type;
2458c87b03e5Sespie int const_p = TREE_CODE (rhs) == INTEGER_CST;
2459c87b03e5Sespie enum machine_mode lmode, rmode, nmode;
2460c87b03e5Sespie int lunsignedp, runsignedp;
2461c87b03e5Sespie int lvolatilep = 0, rvolatilep = 0;
2462c87b03e5Sespie tree linner, rinner = NULL_TREE;
2463c87b03e5Sespie tree mask;
2464c87b03e5Sespie tree offset;
2465c87b03e5Sespie
2466c87b03e5Sespie /* Get all the information about the extractions being done. If the bit size
2467c87b03e5Sespie      is the same as the size of the underlying object, we aren't doing an
2468c87b03e5Sespie extraction at all and so can do nothing. We also don't want to
2469c87b03e5Sespie do anything if the inner expression is a PLACEHOLDER_EXPR since we
2470c87b03e5Sespie then will no longer be able to replace it. */
2471c87b03e5Sespie linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2472c87b03e5Sespie &lunsignedp, &lvolatilep);
2473c87b03e5Sespie if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2474c87b03e5Sespie || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2475c87b03e5Sespie return 0;
2476c87b03e5Sespie
2477c87b03e5Sespie if (!const_p)
2478c87b03e5Sespie {
2479c87b03e5Sespie /* If this is not a constant, we can only do something if bit positions,
2480c87b03e5Sespie sizes, and signedness are the same. */
2481c87b03e5Sespie rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2482c87b03e5Sespie &runsignedp, &rvolatilep);
2483c87b03e5Sespie
2484c87b03e5Sespie if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2485c87b03e5Sespie || lunsignedp != runsignedp || offset != 0
2486c87b03e5Sespie || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2487c87b03e5Sespie return 0;
2488c87b03e5Sespie }
2489c87b03e5Sespie
2490c87b03e5Sespie /* See if we can find a mode to refer to this field. We should be able to,
2491c87b03e5Sespie but fail if we can't. */
2492c87b03e5Sespie nmode = get_best_mode (lbitsize, lbitpos,
2493c87b03e5Sespie const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2494c87b03e5Sespie : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2495c87b03e5Sespie TYPE_ALIGN (TREE_TYPE (rinner))),
2496c87b03e5Sespie word_mode, lvolatilep || rvolatilep);
2497c87b03e5Sespie if (nmode == VOIDmode)
2498c87b03e5Sespie return 0;
2499c87b03e5Sespie
2500c87b03e5Sespie /* Set signed and unsigned types of the precision of this mode for the
2501c87b03e5Sespie shifts below. */
2502c87b03e5Sespie signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2503c87b03e5Sespie unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2504c87b03e5Sespie
2505c87b03e5Sespie /* Compute the bit position and size for the new reference and our offset
2506c87b03e5Sespie within it. If the new reference is the same size as the original, we
2507c87b03e5Sespie won't optimize anything, so return zero. */
2508c87b03e5Sespie nbitsize = GET_MODE_BITSIZE (nmode);
2509c87b03e5Sespie nbitpos = lbitpos & ~ (nbitsize - 1);
2510c87b03e5Sespie lbitpos -= nbitpos;
2511c87b03e5Sespie if (nbitsize == lbitsize)
2512c87b03e5Sespie return 0;
2513c87b03e5Sespie
2514c87b03e5Sespie if (BYTES_BIG_ENDIAN)
2515c87b03e5Sespie lbitpos = nbitsize - lbitsize - lbitpos;
2516c87b03e5Sespie
2517c87b03e5Sespie /* Make the mask to be used against the extracted field. */
2518c87b03e5Sespie mask = build_int_2 (~0, ~0);
2519c87b03e5Sespie TREE_TYPE (mask) = unsigned_type;
2520c87b03e5Sespie force_fit_type (mask, 0);
2521c87b03e5Sespie mask = convert (unsigned_type, mask);
2522c87b03e5Sespie mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2523c87b03e5Sespie mask = const_binop (RSHIFT_EXPR, mask,
2524c87b03e5Sespie size_int (nbitsize - lbitsize - lbitpos), 0);
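  /* Worked example (editor's note): with nbitsize == 8, lbitsize == 3 and
     lbitpos == 2, the two shifts above compute

         ~0          == 1111 1111
         << (8-3)    == 1110 0000
         >> (8-3-2)  == 0001 1100

     i.e. three one bits aligned with the field inside the 8-bit unit.  */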
2525c87b03e5Sespie
2526c87b03e5Sespie if (! const_p)
2527c87b03e5Sespie /* If not comparing with constant, just rework the comparison
2528c87b03e5Sespie and return. */
2529c87b03e5Sespie return build (code, compare_type,
2530c87b03e5Sespie build (BIT_AND_EXPR, unsigned_type,
2531c87b03e5Sespie make_bit_field_ref (linner, unsigned_type,
2532c87b03e5Sespie nbitsize, nbitpos, 1),
2533c87b03e5Sespie mask),
2534c87b03e5Sespie build (BIT_AND_EXPR, unsigned_type,
2535c87b03e5Sespie make_bit_field_ref (rinner, unsigned_type,
2536c87b03e5Sespie nbitsize, nbitpos, 1),
2537c87b03e5Sespie mask));
2538c87b03e5Sespie
2539c87b03e5Sespie /* Otherwise, we are handling the constant case. See if the constant is too
2540c87b03e5Sespie big for the field. Warn and return a tree for 0 (false) if so. We do
2541c87b03e5Sespie this not only for its own sake, but to avoid having to test for this
2542c87b03e5Sespie error case below. If we didn't, we might generate wrong code.
2543c87b03e5Sespie
2544c87b03e5Sespie For unsigned fields, the constant shifted right by the field length should
2545c87b03e5Sespie be all zero. For signed fields, the high-order bits should agree with
2546c87b03e5Sespie the sign bit. */
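  /* Example (editor's note): for an unsigned 3-bit field compared against
     12, 12 >> 3 == 1 is nonzero, so an equality test can never succeed and
     the warning below fires.  The constant 5, by contrast, passes:
     5 >> 3 == 0.  */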
2547c87b03e5Sespie
2548c87b03e5Sespie if (lunsignedp)
2549c87b03e5Sespie {
2550c87b03e5Sespie if (! integer_zerop (const_binop (RSHIFT_EXPR,
2551c87b03e5Sespie convert (unsigned_type, rhs),
2552c87b03e5Sespie size_int (lbitsize), 0)))
2553c87b03e5Sespie {
2554c87b03e5Sespie warning ("comparison is always %d due to width of bit-field",
2555c87b03e5Sespie code == NE_EXPR);
2556c87b03e5Sespie return convert (compare_type,
2557c87b03e5Sespie (code == NE_EXPR
2558c87b03e5Sespie ? integer_one_node : integer_zero_node));
2559c87b03e5Sespie }
2560c87b03e5Sespie }
2561c87b03e5Sespie else
2562c87b03e5Sespie {
2563c87b03e5Sespie tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2564c87b03e5Sespie size_int (lbitsize - 1), 0);
2565c87b03e5Sespie if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2566c87b03e5Sespie {
2567c87b03e5Sespie warning ("comparison is always %d due to width of bit-field",
2568c87b03e5Sespie code == NE_EXPR);
2569c87b03e5Sespie return convert (compare_type,
2570c87b03e5Sespie (code == NE_EXPR
2571c87b03e5Sespie ? integer_one_node : integer_zero_node));
2572c87b03e5Sespie }
2573c87b03e5Sespie }
2574c87b03e5Sespie
2575c87b03e5Sespie /* Single-bit compares should always be against zero. */
2576c87b03e5Sespie if (lbitsize == 1 && ! integer_zerop (rhs))
2577c87b03e5Sespie {
2578c87b03e5Sespie code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2579c87b03e5Sespie rhs = convert (type, integer_zero_node);
2580c87b03e5Sespie }
2581c87b03e5Sespie
2582c87b03e5Sespie /* Make a new bitfield reference, shift the constant over the
2583c87b03e5Sespie appropriate number of bits and mask it with the computed mask
2584c87b03e5Sespie (in case this was a signed field). If we changed it, make a new one. */
2585c87b03e5Sespie lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2586c87b03e5Sespie if (lvolatilep)
2587c87b03e5Sespie {
2588c87b03e5Sespie TREE_SIDE_EFFECTS (lhs) = 1;
2589c87b03e5Sespie TREE_THIS_VOLATILE (lhs) = 1;
2590c87b03e5Sespie }
2591c87b03e5Sespie
2592c87b03e5Sespie rhs = fold (const_binop (BIT_AND_EXPR,
2593c87b03e5Sespie const_binop (LSHIFT_EXPR,
2594c87b03e5Sespie convert (unsigned_type, rhs),
2595c87b03e5Sespie size_int (lbitpos), 0),
2596c87b03e5Sespie mask, 0));
2597c87b03e5Sespie
2598c87b03e5Sespie return build (code, compare_type,
2599c87b03e5Sespie build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2600c87b03e5Sespie rhs);
2601c87b03e5Sespie }
2602c87b03e5Sespie
2603c87b03e5Sespie /* Subroutine for fold_truthop: decode a field reference.
2604c87b03e5Sespie
2605c87b03e5Sespie If EXP is a comparison reference, we return the innermost reference.
2606c87b03e5Sespie
2607c87b03e5Sespie *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2608c87b03e5Sespie set to the starting bit number.
2609c87b03e5Sespie
2610c87b03e5Sespie If the innermost field can be completely contained in a mode-sized
2611c87b03e5Sespie unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2612c87b03e5Sespie
2613c87b03e5Sespie *PVOLATILEP is set to 1 if any expression encountered is volatile;
2614c87b03e5Sespie otherwise it is not changed.
2615c87b03e5Sespie
2616c87b03e5Sespie *PUNSIGNEDP is set to the signedness of the field.
2617c87b03e5Sespie
2618c87b03e5Sespie *PMASK is set to the mask used. This is either contained in a
2619c87b03e5Sespie BIT_AND_EXPR or derived from the width of the field.
2620c87b03e5Sespie
2621c87b03e5Sespie *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2622c87b03e5Sespie
2623c87b03e5Sespie Return 0 if this is not a component reference or is one that we can't
2624c87b03e5Sespie do anything with. */
2625c87b03e5Sespie
2626c87b03e5Sespie static tree
2627c87b03e5Sespie decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2628c87b03e5Sespie pvolatilep, pmask, pand_mask)
2629c87b03e5Sespie tree exp;
2630c87b03e5Sespie HOST_WIDE_INT *pbitsize, *pbitpos;
2631c87b03e5Sespie enum machine_mode *pmode;
2632c87b03e5Sespie int *punsignedp, *pvolatilep;
2633c87b03e5Sespie tree *pmask;
2634c87b03e5Sespie tree *pand_mask;
2635c87b03e5Sespie {
2636c87b03e5Sespie tree and_mask = 0;
2637c87b03e5Sespie tree mask, inner, offset;
2638c87b03e5Sespie tree unsigned_type;
2639c87b03e5Sespie unsigned int precision;
2640c87b03e5Sespie
2641c87b03e5Sespie /* All the optimizations using this function assume integer fields.
2642c87b03e5Sespie There are problems with FP fields since the type_for_size call
2643c87b03e5Sespie below can fail for, e.g., XFmode. */
2644c87b03e5Sespie if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2645c87b03e5Sespie return 0;
2646c87b03e5Sespie
2647c87b03e5Sespie STRIP_NOPS (exp);
2648c87b03e5Sespie
2649c87b03e5Sespie if (TREE_CODE (exp) == BIT_AND_EXPR)
2650c87b03e5Sespie {
2651c87b03e5Sespie and_mask = TREE_OPERAND (exp, 1);
2652c87b03e5Sespie exp = TREE_OPERAND (exp, 0);
2653c87b03e5Sespie STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2654c87b03e5Sespie if (TREE_CODE (and_mask) != INTEGER_CST)
2655c87b03e5Sespie return 0;
2656c87b03e5Sespie }
2657c87b03e5Sespie
2658c87b03e5Sespie inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2659c87b03e5Sespie punsignedp, pvolatilep);
2660c87b03e5Sespie if ((inner == exp && and_mask == 0)
2661c87b03e5Sespie || *pbitsize < 0 || offset != 0
2662c87b03e5Sespie || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2663c87b03e5Sespie return 0;
2664c87b03e5Sespie
2665c87b03e5Sespie /* Compute the mask to access the bitfield. */
2666c87b03e5Sespie unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2667c87b03e5Sespie precision = TYPE_PRECISION (unsigned_type);
2668c87b03e5Sespie
2669c87b03e5Sespie mask = build_int_2 (~0, ~0);
2670c87b03e5Sespie TREE_TYPE (mask) = unsigned_type;
2671c87b03e5Sespie force_fit_type (mask, 0);
2672c87b03e5Sespie mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2673c87b03e5Sespie mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
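  /* Worked example (editor's note): for *pbitsize == 3 and an 8-bit
     unsigned type (precision == 8), ~0 shifted left and then logically
     right by 5 leaves 0000 0111, i.e. a mask of three low-order ones.  */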
2674c87b03e5Sespie
2675c87b03e5Sespie /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2676c87b03e5Sespie if (and_mask != 0)
2677c87b03e5Sespie mask = fold (build (BIT_AND_EXPR, unsigned_type,
2678c87b03e5Sespie convert (unsigned_type, and_mask), mask));
2679c87b03e5Sespie
2680c87b03e5Sespie *pmask = mask;
2681c87b03e5Sespie *pand_mask = and_mask;
2682c87b03e5Sespie return inner;
2683c87b03e5Sespie }
2684c87b03e5Sespie
2685c87b03e5Sespie /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2686c87b03e5Sespie bit positions. */
2687c87b03e5Sespie
2688c87b03e5Sespie static int
2689c87b03e5Sespie all_ones_mask_p (mask, size)
2690c87b03e5Sespie tree mask;
2691c87b03e5Sespie int size;
2692c87b03e5Sespie {
2693c87b03e5Sespie tree type = TREE_TYPE (mask);
2694c87b03e5Sespie unsigned int precision = TYPE_PRECISION (type);
2695c87b03e5Sespie tree tmask;
2696c87b03e5Sespie
2697c87b03e5Sespie tmask = build_int_2 (~0, ~0);
2698c87b03e5Sespie TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2699c87b03e5Sespie force_fit_type (tmask, 0);
2700c87b03e5Sespie return
2701c87b03e5Sespie tree_int_cst_equal (mask,
2702c87b03e5Sespie const_binop (RSHIFT_EXPR,
2703c87b03e5Sespie const_binop (LSHIFT_EXPR, tmask,
2704c87b03e5Sespie size_int (precision - size),
2705c87b03e5Sespie 0),
2706c87b03e5Sespie size_int (precision - size), 0));
2707c87b03e5Sespie }
2708c87b03e5Sespie
2709c87b03e5Sespie /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2710c87b03e5Sespie represents the sign bit of EXP's type. If EXP represents a sign
2711c87b03e5Sespie or zero extension, also test VAL against the unextended type.
2712c87b03e5Sespie The return value is the (sub)expression whose sign bit is VAL,
2713c87b03e5Sespie or NULL_TREE otherwise. */
2714c87b03e5Sespie
2715c87b03e5Sespie static tree
2716c87b03e5Sespie sign_bit_p (exp, val)
2717c87b03e5Sespie tree exp;
2718c87b03e5Sespie tree val;
2719c87b03e5Sespie {
2720c87b03e5Sespie unsigned HOST_WIDE_INT lo;
2721c87b03e5Sespie HOST_WIDE_INT hi;
2722c87b03e5Sespie int width;
2723c87b03e5Sespie tree t;
2724c87b03e5Sespie
2725c87b03e5Sespie /* Tree EXP must have an integral type. */
2726c87b03e5Sespie t = TREE_TYPE (exp);
2727c87b03e5Sespie if (! INTEGRAL_TYPE_P (t))
2728c87b03e5Sespie return NULL_TREE;
2729c87b03e5Sespie
2730c87b03e5Sespie /* Tree VAL must be an integer constant. */
2731c87b03e5Sespie if (TREE_CODE (val) != INTEGER_CST
2732c87b03e5Sespie || TREE_CONSTANT_OVERFLOW (val))
2733c87b03e5Sespie return NULL_TREE;
2734c87b03e5Sespie
2735c87b03e5Sespie width = TYPE_PRECISION (t);
2736c87b03e5Sespie if (width > HOST_BITS_PER_WIDE_INT)
2737c87b03e5Sespie {
2738c87b03e5Sespie hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2739c87b03e5Sespie lo = 0;
2740c87b03e5Sespie }
2741c87b03e5Sespie else
2742c87b03e5Sespie {
2743c87b03e5Sespie hi = 0;
2744c87b03e5Sespie lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2745c87b03e5Sespie }
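  /* Example (editor's note): on a host where HOST_BITS_PER_WIDE_INT is 64,
     a 32-bit type takes the "else" branch above: hi == 0 and
     lo == (unsigned HOST_WIDE_INT) 1 << 31, i.e. 0x80000000, the sign bit
     of the 32-bit type.  A 128-bit type would instead set
     hi == 1 << (128 - 64 - 1) and lo == 0.  */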
2746c87b03e5Sespie
2747c87b03e5Sespie if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2748c87b03e5Sespie return exp;
2749c87b03e5Sespie
2750c87b03e5Sespie /* Handle extension from a narrower type. */
2751c87b03e5Sespie if (TREE_CODE (exp) == NOP_EXPR
2752c87b03e5Sespie && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2753c87b03e5Sespie return sign_bit_p (TREE_OPERAND (exp, 0), val);
2754c87b03e5Sespie
2755c87b03e5Sespie return NULL_TREE;
2756c87b03e5Sespie }
2757c87b03e5Sespie
2758c87b03e5Sespie /* Subroutine for fold_truthop: determine if an operand is simple enough
2759c87b03e5Sespie to be evaluated unconditionally. */
2760c87b03e5Sespie
2761c87b03e5Sespie static int
2762c87b03e5Sespie simple_operand_p (exp)
2763c87b03e5Sespie tree exp;
2764c87b03e5Sespie {
2765c87b03e5Sespie /* Strip any conversions that don't change the machine mode. */
2766c87b03e5Sespie while ((TREE_CODE (exp) == NOP_EXPR
2767c87b03e5Sespie || TREE_CODE (exp) == CONVERT_EXPR)
2768c87b03e5Sespie && (TYPE_MODE (TREE_TYPE (exp))
2769c87b03e5Sespie == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2770c87b03e5Sespie exp = TREE_OPERAND (exp, 0);
2771c87b03e5Sespie
2772c87b03e5Sespie return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2773c87b03e5Sespie || (DECL_P (exp)
2774c87b03e5Sespie && ! TREE_ADDRESSABLE (exp)
2775c87b03e5Sespie && ! TREE_THIS_VOLATILE (exp)
2776c87b03e5Sespie && ! DECL_NONLOCAL (exp)
2777c87b03e5Sespie /* Don't regard global variables as simple. They may be
2778c87b03e5Sespie allocated in ways unknown to the compiler (shared memory,
2779c87b03e5Sespie #pragma weak, etc). */
2780c87b03e5Sespie && ! TREE_PUBLIC (exp)
2781c87b03e5Sespie && ! DECL_EXTERNAL (exp)
2782c87b03e5Sespie /* Loading a static variable is unduly expensive, but global
2783c87b03e5Sespie registers aren't expensive. */
2784c87b03e5Sespie && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2785c87b03e5Sespie }
2786c87b03e5Sespie
2787c87b03e5Sespie /* The following functions are subroutines to fold_range_test and allow it to
2788c87b03e5Sespie try to change a logical combination of comparisons into a range test.
2789c87b03e5Sespie
2790c87b03e5Sespie For example, both
2791c87b03e5Sespie X == 2 || X == 3 || X == 4 || X == 5
2792c87b03e5Sespie and
2793c87b03e5Sespie X >= 2 && X <= 5
2794c87b03e5Sespie are converted to
2795c87b03e5Sespie (unsigned) (X - 2) <= 3
2796c87b03e5Sespie
2797c87b03e5Sespie We describe each set of comparisons as being either inside or outside
2798c87b03e5Sespie a range, using a variable named like IN_P, and then describe the
2799c87b03e5Sespie range with a lower and upper bound. If one of the bounds is omitted,
2800c87b03e5Sespie it represents either the highest or lowest value of the type.
2801c87b03e5Sespie
2802c87b03e5Sespie In the comments below, we represent a range by two numbers in brackets
2803c87b03e5Sespie preceded by a "+" to designate being inside that range, or a "-" to
2804c87b03e5Sespie designate being outside that range, so the condition can be inverted by
2805c87b03e5Sespie flipping the prefix. An omitted bound is represented by a "-". For
2806c87b03e5Sespie example, "- [-, 10]" means being outside the range starting at the lowest
2807c87b03e5Sespie possible value and ending at 10, in other words, being greater than 10.
2808c87b03e5Sespie The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2809c87b03e5Sespie always false.
2810c87b03e5Sespie
2811c87b03e5Sespie We set up things so that the missing bounds are handled in a consistent
2812c87b03e5Sespie manner so neither a missing bound nor "true" and "false" need to be
2813c87b03e5Sespie handled using a special case. */
2814c87b03e5Sespie
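/* Editor's note, a sketch of why the transformation above is valid,
   assuming a 32-bit unsigned int: (unsigned) (X - 2) <= 3 accepts exactly
   X in {2, 3, 4, 5}.  For X == 1 the subtraction wraps to 0xffffffff,
   which fails the comparison, and for X >= 6 the result is at least 4,
   so a single unsigned compare replaces the chain of tests.  */
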
2815c87b03e5Sespie /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2816c87b03e5Sespie of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2817c87b03e5Sespie and UPPER1_P are nonzero if the respective argument is an upper bound
2818c87b03e5Sespie and zero for a lower. TYPE, if nonzero, is the type of the result; it
2819c87b03e5Sespie must be specified for a comparison. ARG1 will be converted to ARG0's
2820c87b03e5Sespie type if both are specified. */
2821c87b03e5Sespie
2822c87b03e5Sespie static tree
2823c87b03e5Sespie range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2824c87b03e5Sespie enum tree_code code;
2825c87b03e5Sespie tree type;
2826c87b03e5Sespie tree arg0, arg1;
2827c87b03e5Sespie int upper0_p, upper1_p;
2828c87b03e5Sespie {
2829c87b03e5Sespie tree tem;
2830c87b03e5Sespie int result;
2831c87b03e5Sespie int sgn0, sgn1;
2832c87b03e5Sespie
2833c87b03e5Sespie /* If neither arg represents infinity, do the normal operation.
2834c87b03e5Sespie Else, if not a comparison, return infinity. Else handle the special
2835c87b03e5Sespie comparison rules. Note that most of the cases below won't occur, but
2836c87b03e5Sespie are handled for consistency. */
2837c87b03e5Sespie
2838c87b03e5Sespie if (arg0 != 0 && arg1 != 0)
2839c87b03e5Sespie {
2840c87b03e5Sespie tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2841c87b03e5Sespie arg0, convert (TREE_TYPE (arg0), arg1)));
2842c87b03e5Sespie STRIP_NOPS (tem);
2843c87b03e5Sespie return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2844c87b03e5Sespie }
2845c87b03e5Sespie
2846c87b03e5Sespie if (TREE_CODE_CLASS (code) != '<')
2847c87b03e5Sespie return 0;
2848c87b03e5Sespie
2849c87b03e5Sespie /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2850c87b03e5Sespie for neither. In real maths, we cannot assume open ended ranges are
2851c87b03e5Sespie the same. But, this is computer arithmetic, where numbers are finite.
2852c87b03e5Sespie We can therefore make the transformation of any unbounded range with
2853c87b03e5Sespie the value Z, Z being greater than any representable number. This permits
2854c87b03e5Sespie us to treat unbounded ranges as equal. */
2855c87b03e5Sespie sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2856c87b03e5Sespie sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
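  /* Example (editor's note): comparing a missing lower bound (sgn == -1)
     against a finite ARG1 (sgn == 0) under LT_EXPR yields -1 < 0, i.e.
     true, reflecting that the lowest value of the type is below any
     finite bound.  Two missing bounds on the same side compare equal.  */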
2857c87b03e5Sespie switch (code)
2858c87b03e5Sespie {
2859c87b03e5Sespie case EQ_EXPR:
2860c87b03e5Sespie result = sgn0 == sgn1;
2861c87b03e5Sespie break;
2862c87b03e5Sespie case NE_EXPR:
2863c87b03e5Sespie result = sgn0 != sgn1;
2864c87b03e5Sespie break;
2865c87b03e5Sespie case LT_EXPR:
2866c87b03e5Sespie result = sgn0 < sgn1;
2867c87b03e5Sespie break;
2868c87b03e5Sespie case LE_EXPR:
2869c87b03e5Sespie result = sgn0 <= sgn1;
2870c87b03e5Sespie break;
2871c87b03e5Sespie case GT_EXPR:
2872c87b03e5Sespie result = sgn0 > sgn1;
2873c87b03e5Sespie break;
2874c87b03e5Sespie case GE_EXPR:
2875c87b03e5Sespie result = sgn0 >= sgn1;
2876c87b03e5Sespie break;
2877c87b03e5Sespie default:
2878c87b03e5Sespie abort ();
2879c87b03e5Sespie }
2880c87b03e5Sespie
2881c87b03e5Sespie return convert (type, result ? integer_one_node : integer_zero_node);
2882c87b03e5Sespie }
2883c87b03e5Sespie
2884c87b03e5Sespie /* Given EXP, a logical expression, set the range it is testing into
2885c87b03e5Sespie variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2886c87b03e5Sespie actually being tested. *PLOW and *PHIGH will be made of the same type
2887c87b03e5Sespie as the returned expression. If EXP is not a comparison, we will most
2888c87b03e5Sespie likely not be returning a useful value and range. */
2889c87b03e5Sespie
2890c87b03e5Sespie static tree
2891c87b03e5Sespie make_range (exp, pin_p, plow, phigh)
2892c87b03e5Sespie tree exp;
2893c87b03e5Sespie int *pin_p;
2894c87b03e5Sespie tree *plow, *phigh;
2895c87b03e5Sespie {
2896c87b03e5Sespie enum tree_code code;
2897c87b03e5Sespie tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2898c87b03e5Sespie tree orig_type = NULL_TREE;
2899c87b03e5Sespie int in_p, n_in_p;
2900c87b03e5Sespie tree low, high, n_low, n_high;
2901c87b03e5Sespie
2902c87b03e5Sespie /* Start with simply saying "EXP != 0" and then look at the code of EXP
2903c87b03e5Sespie and see if we can refine the range. Some of the cases below may not
2904c87b03e5Sespie happen, but it doesn't seem worth worrying about this. We "continue"
2905c87b03e5Sespie the outer loop when we've changed something; otherwise we "break"
2906c87b03e5Sespie the switch, which will "break" the while. */
2907c87b03e5Sespie
2908c87b03e5Sespie in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2909c87b03e5Sespie
2910c87b03e5Sespie while (1)
2911c87b03e5Sespie {
2912c87b03e5Sespie code = TREE_CODE (exp);
2913c87b03e5Sespie
2914c87b03e5Sespie if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2915c87b03e5Sespie {
2916c87b03e5Sespie arg0 = TREE_OPERAND (exp, 0);
2917c87b03e5Sespie if (TREE_CODE_CLASS (code) == '<'
2918c87b03e5Sespie || TREE_CODE_CLASS (code) == '1'
2919c87b03e5Sespie || TREE_CODE_CLASS (code) == '2')
2920c87b03e5Sespie type = TREE_TYPE (arg0);
2921c87b03e5Sespie if (TREE_CODE_CLASS (code) == '2'
2922c87b03e5Sespie || TREE_CODE_CLASS (code) == '<'
2923c87b03e5Sespie || (TREE_CODE_CLASS (code) == 'e'
2924c87b03e5Sespie && TREE_CODE_LENGTH (code) > 1))
2925c87b03e5Sespie arg1 = TREE_OPERAND (exp, 1);
2926c87b03e5Sespie }
2927c87b03e5Sespie
2928c87b03e5Sespie /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2929c87b03e5Sespie lose a cast by accident. */
2930c87b03e5Sespie if (type != NULL_TREE && orig_type == NULL_TREE)
2931c87b03e5Sespie orig_type = type;
2932c87b03e5Sespie
2933c87b03e5Sespie switch (code)
2934c87b03e5Sespie {
2935c87b03e5Sespie case TRUTH_NOT_EXPR:
2936c87b03e5Sespie in_p = ! in_p, exp = arg0;
2937c87b03e5Sespie continue;
2938c87b03e5Sespie
2939c87b03e5Sespie case EQ_EXPR: case NE_EXPR:
2940c87b03e5Sespie case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2941c87b03e5Sespie /* We can only do something if the range is testing for zero
2942c87b03e5Sespie and if the second operand is an integer constant. Note that
2943c87b03e5Sespie saying something is "in" the range we make is done by
2944c87b03e5Sespie complementing IN_P, since IN_P initially represents the case of
2945c87b03e5Sespie being not equal to zero; "out" is leaving it alone. */
2946c87b03e5Sespie if (low == 0 || high == 0
2947c87b03e5Sespie || ! integer_zerop (low) || ! integer_zerop (high)
2948c87b03e5Sespie || TREE_CODE (arg1) != INTEGER_CST)
2949c87b03e5Sespie break;
2950c87b03e5Sespie
2951c87b03e5Sespie switch (code)
2952c87b03e5Sespie {
2953c87b03e5Sespie case NE_EXPR: /* - [c, c] */
2954c87b03e5Sespie low = high = arg1;
2955c87b03e5Sespie break;
2956c87b03e5Sespie case EQ_EXPR: /* + [c, c] */
2957c87b03e5Sespie in_p = ! in_p, low = high = arg1;
2958c87b03e5Sespie break;
2959c87b03e5Sespie case GT_EXPR: /* - [-, c] */
2960c87b03e5Sespie low = 0, high = arg1;
2961c87b03e5Sespie break;
2962c87b03e5Sespie case GE_EXPR: /* + [c, -] */
2963c87b03e5Sespie in_p = ! in_p, low = arg1, high = 0;
2964c87b03e5Sespie break;
2965c87b03e5Sespie case LT_EXPR: /* - [c, -] */
2966c87b03e5Sespie low = arg1, high = 0;
2967c87b03e5Sespie break;
2968c87b03e5Sespie case LE_EXPR: /* + [-, c] */
2969c87b03e5Sespie in_p = ! in_p, low = 0, high = arg1;
2970c87b03e5Sespie break;
2971c87b03e5Sespie default:
2972c87b03e5Sespie abort ();
2973c87b03e5Sespie }
2974c87b03e5Sespie
2975c87b03e5Sespie exp = arg0;
2976c87b03e5Sespie
2977c87b03e5Sespie /* If this is an unsigned comparison, we also know that EXP is
2978c87b03e5Sespie greater than or equal to zero. We base the range tests we make
2979c87b03e5Sespie on that fact, so we record it here so we can parse existing
2980c87b03e5Sespie range tests. */
2981c87b03e5Sespie if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2982c87b03e5Sespie {
2983c87b03e5Sespie if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2984c87b03e5Sespie 1, convert (type, integer_zero_node),
2985c87b03e5Sespie NULL_TREE))
2986c87b03e5Sespie break;
2987c87b03e5Sespie
2988c87b03e5Sespie in_p = n_in_p, low = n_low, high = n_high;
2989c87b03e5Sespie
2990c87b03e5Sespie /* If the high bound is missing, but we
2991c87b03e5Sespie have a low bound, reverse the range so
2992c87b03e5Sespie it goes from zero to the low bound minus 1. */
2993c87b03e5Sespie if (high == 0 && low)
2994c87b03e5Sespie {
2995c87b03e5Sespie in_p = ! in_p;
2996c87b03e5Sespie high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2997c87b03e5Sespie integer_one_node, 0);
2998c87b03e5Sespie low = convert (type, integer_zero_node);
2999c87b03e5Sespie }
3000c87b03e5Sespie }
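	  /* Example (editor's note): for unsigned X, "X >= 5" first becomes
	     the range + [5, -].  The high bound is missing, so it is
	     rewritten here as - [0, 4], i.e. "not (X <= 4)", which the
	     callers can combine with other ranges.  */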
3001c87b03e5Sespie continue;
3002c87b03e5Sespie
3003c87b03e5Sespie case NEGATE_EXPR:
3004c87b03e5Sespie /* (-x) IN [a,b] -> x in [-b, -a] */
3005c87b03e5Sespie n_low = range_binop (MINUS_EXPR, type,
3006c87b03e5Sespie convert (type, integer_zero_node), 0, high, 1);
3007c87b03e5Sespie n_high = range_binop (MINUS_EXPR, type,
3008c87b03e5Sespie convert (type, integer_zero_node), 0, low, 0);
3009c87b03e5Sespie low = n_low, high = n_high;
3010c87b03e5Sespie exp = arg0;
3011c87b03e5Sespie continue;
3012c87b03e5Sespie
3013c87b03e5Sespie case BIT_NOT_EXPR:
3014c87b03e5Sespie /* ~ X -> -X - 1 */
3015c87b03e5Sespie exp = build (MINUS_EXPR, type, negate_expr (arg0),
3016c87b03e5Sespie convert (type, integer_one_node));
3017c87b03e5Sespie continue;
3018c87b03e5Sespie
3019c87b03e5Sespie case PLUS_EXPR: case MINUS_EXPR:
3020c87b03e5Sespie if (TREE_CODE (arg1) != INTEGER_CST)
3021c87b03e5Sespie break;
3022c87b03e5Sespie
3023c87b03e5Sespie /* If EXP is signed, any overflow in the computation is undefined,
3024c87b03e5Sespie so we don't worry about it so long as our computations on
3025c87b03e5Sespie the bounds don't overflow. For unsigned, overflow is defined
3026c87b03e5Sespie and this is exactly the right thing. */
3027c87b03e5Sespie n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3028c87b03e5Sespie type, low, 0, arg1, 0);
3029c87b03e5Sespie n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3030c87b03e5Sespie type, high, 1, arg1, 0);
3031c87b03e5Sespie if ((n_low != 0 && TREE_OVERFLOW (n_low))
3032c87b03e5Sespie || (n_high != 0 && TREE_OVERFLOW (n_high)))
3033c87b03e5Sespie break;
3034c87b03e5Sespie
3035c87b03e5Sespie /* Check for an unsigned range which has wrapped around the maximum
3036c87b03e5Sespie value thus making n_high < n_low, and normalize it. */
3037c87b03e5Sespie if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3038c87b03e5Sespie {
3039c87b03e5Sespie low = range_binop (PLUS_EXPR, type, n_high, 0,
3040c87b03e5Sespie integer_one_node, 0);
3041c87b03e5Sespie high = range_binop (MINUS_EXPR, type, n_low, 0,
3042c87b03e5Sespie integer_one_node, 0);
3043c87b03e5Sespie
3044c87b03e5Sespie /* If the range is of the form +/- [ x+1, x ], we won't
3045c87b03e5Sespie be able to normalize it. But then, it represents the
3046c87b03e5Sespie whole range or the empty set, so make it
3047c87b03e5Sespie +/- [ -, - ]. */
3048c87b03e5Sespie if (tree_int_cst_equal (n_low, low)
3049c87b03e5Sespie && tree_int_cst_equal (n_high, high))
3050c87b03e5Sespie low = high = 0;
3051c87b03e5Sespie else
3052c87b03e5Sespie in_p = ! in_p;
3053c87b03e5Sespie }
3054c87b03e5Sespie else
3055c87b03e5Sespie low = n_low, high = n_high;
3056c87b03e5Sespie
3057c87b03e5Sespie exp = arg0;
3058c87b03e5Sespie continue;
3059c87b03e5Sespie
3060c87b03e5Sespie case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3061c87b03e5Sespie if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3062c87b03e5Sespie break;
3063c87b03e5Sespie
3064c87b03e5Sespie if (! INTEGRAL_TYPE_P (type)
3065c87b03e5Sespie || (low != 0 && ! int_fits_type_p (low, type))
3066c87b03e5Sespie || (high != 0 && ! int_fits_type_p (high, type)))
3067c87b03e5Sespie break;
3068c87b03e5Sespie
3069c87b03e5Sespie n_low = low, n_high = high;
3070c87b03e5Sespie
3071c87b03e5Sespie if (n_low != 0)
3072c87b03e5Sespie n_low = convert (type, n_low);
3073c87b03e5Sespie
3074c87b03e5Sespie if (n_high != 0)
3075c87b03e5Sespie n_high = convert (type, n_high);
3076c87b03e5Sespie
3077c87b03e5Sespie /* If we're converting from an unsigned to a signed type,
3078c87b03e5Sespie we will be doing the comparison as unsigned. The tests above
3079c87b03e5Sespie have already verified that LOW and HIGH are both positive.
3080c87b03e5Sespie
3081c87b03e5Sespie So we have to make sure that the original unsigned value will
3082c87b03e5Sespie be interpreted as positive. */
3083c87b03e5Sespie if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3084c87b03e5Sespie {
3085c87b03e5Sespie tree equiv_type = (*lang_hooks.types.type_for_mode)
3086c87b03e5Sespie (TYPE_MODE (type), 1);
3087c87b03e5Sespie tree high_positive;
3088c87b03e5Sespie
3089c87b03e5Sespie /* A range without an upper bound is, naturally, unbounded.
3090c87b03e5Sespie Since convert would have cropped a very large value, use
3091c87b03e5Sespie the max value for the destination type. */
3092c87b03e5Sespie high_positive
3093c87b03e5Sespie = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3094c87b03e5Sespie : TYPE_MAX_VALUE (type);
3095c87b03e5Sespie
3096c87b03e5Sespie if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3097c87b03e5Sespie high_positive = fold (build (RSHIFT_EXPR, type,
3098c87b03e5Sespie convert (type, high_positive),
3099c87b03e5Sespie convert (type, integer_one_node)));
3100c87b03e5Sespie
3101c87b03e5Sespie /* If the low bound is specified, "and" the range with the
3102c87b03e5Sespie range for which the original unsigned value will be
3103c87b03e5Sespie positive. */
3104c87b03e5Sespie if (low != 0)
3105c87b03e5Sespie {
3106c87b03e5Sespie if (! merge_ranges (&n_in_p, &n_low, &n_high,
3107c87b03e5Sespie 1, n_low, n_high,
3108c87b03e5Sespie 1, convert (type, integer_zero_node),
3109c87b03e5Sespie high_positive))
3110c87b03e5Sespie break;
3111c87b03e5Sespie
3112c87b03e5Sespie in_p = (n_in_p == in_p);
3113c87b03e5Sespie }
3114c87b03e5Sespie else
3115c87b03e5Sespie {
3116c87b03e5Sespie /* Otherwise, "or" the range with the range of the input
3117c87b03e5Sespie that will be interpreted as negative. */
3118c87b03e5Sespie if (! merge_ranges (&n_in_p, &n_low, &n_high,
3119c87b03e5Sespie 0, n_low, n_high,
3120c87b03e5Sespie 1, convert (type, integer_zero_node),
3121c87b03e5Sespie high_positive))
3122c87b03e5Sespie break;
3123c87b03e5Sespie
3124c87b03e5Sespie in_p = (in_p != n_in_p);
3125c87b03e5Sespie }
3126c87b03e5Sespie }
3127c87b03e5Sespie
3128c87b03e5Sespie exp = arg0;
3129c87b03e5Sespie low = n_low, high = n_high;
3130c87b03e5Sespie continue;
3131c87b03e5Sespie
3132c87b03e5Sespie default:
3133c87b03e5Sespie break;
3134c87b03e5Sespie }
3135c87b03e5Sespie
3136c87b03e5Sespie break;
3137c87b03e5Sespie }
3138c87b03e5Sespie
3139c87b03e5Sespie /* If EXP is a constant, we can evaluate whether this is true or false. */
3140c87b03e5Sespie if (TREE_CODE (exp) == INTEGER_CST)
3141c87b03e5Sespie {
3142c87b03e5Sespie in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3143c87b03e5Sespie exp, 0, low, 0))
3144c87b03e5Sespie && integer_onep (range_binop (LE_EXPR, integer_type_node,
3145c87b03e5Sespie exp, 1, high, 1)));
3146c87b03e5Sespie low = high = 0;
3147c87b03e5Sespie exp = 0;
3148c87b03e5Sespie }
3149c87b03e5Sespie
3150c87b03e5Sespie *pin_p = in_p, *plow = low, *phigh = high;
3151c87b03e5Sespie return exp;
3152c87b03e5Sespie }
3153c87b03e5Sespie
3154c87b03e5Sespie /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3155c87b03e5Sespie type, TYPE, return an expression to test if EXP is in (or out of, depending
3156c87b03e5Sespie on IN_P) the range. */
3157c87b03e5Sespie
3158c87b03e5Sespie static tree
3159c87b03e5Sespie build_range_check (type, exp, in_p, low, high)
3160c87b03e5Sespie tree type;
3161c87b03e5Sespie tree exp;
3162c87b03e5Sespie int in_p;
3163c87b03e5Sespie tree low, high;
3164c87b03e5Sespie {
3165c87b03e5Sespie tree etype = TREE_TYPE (exp);
3166c87b03e5Sespie tree value;
3167c87b03e5Sespie
3168c87b03e5Sespie if (! in_p
3169c87b03e5Sespie && (0 != (value = build_range_check (type, exp, 1, low, high))))
3170c87b03e5Sespie return invert_truthvalue (value);
3171c87b03e5Sespie
3172c87b03e5Sespie if (low == 0 && high == 0)
3173c87b03e5Sespie return convert (type, integer_one_node);
3174c87b03e5Sespie
3175c87b03e5Sespie if (low == 0)
3176c87b03e5Sespie return fold (build (LE_EXPR, type, exp, high));
3177c87b03e5Sespie
3178c87b03e5Sespie if (high == 0)
3179c87b03e5Sespie return fold (build (GE_EXPR, type, exp, low));
3180c87b03e5Sespie
3181c87b03e5Sespie if (operand_equal_p (low, high, 0))
3182c87b03e5Sespie return fold (build (EQ_EXPR, type, exp, low));
3183c87b03e5Sespie
3184c87b03e5Sespie if (integer_zerop (low))
3185c87b03e5Sespie {
3186c87b03e5Sespie if (! TREE_UNSIGNED (etype))
3187c87b03e5Sespie {
3188c87b03e5Sespie etype = (*lang_hooks.types.unsigned_type) (etype);
3189c87b03e5Sespie high = convert (etype, high);
3190c87b03e5Sespie exp = convert (etype, exp);
3191c87b03e5Sespie }
3192c87b03e5Sespie return build_range_check (type, exp, 1, 0, high);
3193c87b03e5Sespie }
3194c87b03e5Sespie
3195c87b03e5Sespie /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3196c87b03e5Sespie if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3197c87b03e5Sespie {
3198c87b03e5Sespie unsigned HOST_WIDE_INT lo;
3199c87b03e5Sespie HOST_WIDE_INT hi;
3200c87b03e5Sespie int prec;
3201c87b03e5Sespie
3202c87b03e5Sespie prec = TYPE_PRECISION (etype);
3203c87b03e5Sespie if (prec <= HOST_BITS_PER_WIDE_INT)
3204c87b03e5Sespie {
3205c87b03e5Sespie hi = 0;
3206c87b03e5Sespie lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3207c87b03e5Sespie }
3208c87b03e5Sespie else
3209c87b03e5Sespie {
3210c87b03e5Sespie hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3211c87b03e5Sespie lo = (unsigned HOST_WIDE_INT) -1;
3212c87b03e5Sespie }
3213c87b03e5Sespie
3214c87b03e5Sespie if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3215c87b03e5Sespie {
3216c87b03e5Sespie if (TREE_UNSIGNED (etype))
3217c87b03e5Sespie {
3218c87b03e5Sespie etype = (*lang_hooks.types.signed_type) (etype);
3219c87b03e5Sespie exp = convert (etype, exp);
3220c87b03e5Sespie }
3221c87b03e5Sespie return fold (build (GT_EXPR, type, exp,
3222c87b03e5Sespie convert (etype, integer_zero_node)));
3223c87b03e5Sespie }
3224c87b03e5Sespie }
3225c87b03e5Sespie
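  /* Example (editor's note): the subtraction below rewrites a request for
     EXP in [2, 5] as a request for EXP - 2 in [0, 3]; the recursive call
     then hits the integer_zerop (low) case above and produces the single
     unsigned comparison (unsigned type) (EXP - 2) <= 3.  */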
3226c87b03e5Sespie if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3227c87b03e5Sespie && ! TREE_OVERFLOW (value))
3228c87b03e5Sespie return build_range_check (type,
3229c87b03e5Sespie fold (build (MINUS_EXPR, etype, exp, low)),
3230c87b03e5Sespie 1, convert (etype, integer_zero_node), value);
3231c87b03e5Sespie
3232c87b03e5Sespie return 0;
3233c87b03e5Sespie }
3234c87b03e5Sespie
3235c87b03e5Sespie /* Given two ranges, see if we can merge them into one. Return 1 if we
3236c87b03e5Sespie can, 0 if we can't. Set the output range into the specified parameters. */
3237c87b03e5Sespie
3238c87b03e5Sespie static int
3239c87b03e5Sespie merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3240c87b03e5Sespie int *pin_p;
3241c87b03e5Sespie tree *plow, *phigh;
3242c87b03e5Sespie int in0_p, in1_p;
3243c87b03e5Sespie tree low0, high0, low1, high1;
3244c87b03e5Sespie {
3245c87b03e5Sespie int no_overlap;
3246c87b03e5Sespie int subset;
3247c87b03e5Sespie int temp;
3248c87b03e5Sespie tree tem;
3249c87b03e5Sespie int in_p;
3250c87b03e5Sespie tree low, high;
3251c87b03e5Sespie int lowequal = ((low0 == 0 && low1 == 0)
3252c87b03e5Sespie || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3253c87b03e5Sespie low0, 0, low1, 0)));
3254c87b03e5Sespie int highequal = ((high0 == 0 && high1 == 0)
3255c87b03e5Sespie || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3256c87b03e5Sespie high0, 1, high1, 1)));
3257c87b03e5Sespie
3258c87b03e5Sespie /* Make range 0 be the range that starts first, or ends last if they
3259c87b03e5Sespie start at the same value. Swap them if it isn't. */
3260c87b03e5Sespie if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3261c87b03e5Sespie low0, 0, low1, 0))
3262c87b03e5Sespie || (lowequal
3263c87b03e5Sespie && integer_onep (range_binop (GT_EXPR, integer_type_node,
3264c87b03e5Sespie high1, 1, high0, 1))))
3265c87b03e5Sespie {
3266c87b03e5Sespie temp = in0_p, in0_p = in1_p, in1_p = temp;
3267c87b03e5Sespie tem = low0, low0 = low1, low1 = tem;
3268c87b03e5Sespie tem = high0, high0 = high1, high1 = tem;
3269c87b03e5Sespie }
3270c87b03e5Sespie
3271c87b03e5Sespie /* Now flag two cases, whether the ranges are disjoint or whether the
3272c87b03e5Sespie second range is totally subsumed in the first. Note that the tests
3273c87b03e5Sespie below are simplified by the ones above. */
3274c87b03e5Sespie no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3275c87b03e5Sespie high0, 1, low1, 0));
3276c87b03e5Sespie subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3277c87b03e5Sespie high1, 1, high0, 1));
3278c87b03e5Sespie
3279c87b03e5Sespie /* We now have four cases, depending on whether we are including or
3280c87b03e5Sespie excluding the two ranges. */
3281c87b03e5Sespie if (in0_p && in1_p)
3282c87b03e5Sespie {
3283c87b03e5Sespie /* If they don't overlap, the result is false. If the second range
3284c87b03e5Sespie is a subset it is the result. Otherwise, the range is from the start
3285c87b03e5Sespie of the second to the end of the first. */
3286c87b03e5Sespie if (no_overlap)
3287c87b03e5Sespie in_p = 0, low = high = 0;
3288c87b03e5Sespie else if (subset)
3289c87b03e5Sespie in_p = 1, low = low1, high = high1;
3290c87b03e5Sespie else
3291c87b03e5Sespie in_p = 1, low = low1, high = high0;
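      /* Example (editor's note): merging + [2, 10] with + [5, 20]: the
	 ranges overlap and the second is not a subset, so the result is
	 the intersection + [5, 10] (low from the second range, high from
	 the first).  */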
3292c87b03e5Sespie }
3293c87b03e5Sespie
3294c87b03e5Sespie else if (in0_p && ! in1_p)
3295c87b03e5Sespie {
3296c87b03e5Sespie /* If they don't overlap, the result is the first range. If they are
3297c87b03e5Sespie equal, the result is false. If the second range is a subset of the
3298c87b03e5Sespie first, and the ranges begin at the same place, we go from just after
3299c87b03e5Sespie the end of the first range to the end of the second. If the second
3300c87b03e5Sespie range is not a subset of the first, or if it is a subset and both
3301c87b03e5Sespie ranges end at the same place, the range starts at the start of the
3302c87b03e5Sespie first range and ends just before the second range.
3303c87b03e5Sespie Otherwise, we can't describe this as a single range. */
3304c87b03e5Sespie if (no_overlap)
3305c87b03e5Sespie in_p = 1, low = low0, high = high0;
3306c87b03e5Sespie else if (lowequal && highequal)
3307c87b03e5Sespie in_p = 0, low = high = 0;
3308c87b03e5Sespie else if (subset && lowequal)
3309c87b03e5Sespie {
3310c87b03e5Sespie in_p = 1, high = high0;
3311c87b03e5Sespie low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3312c87b03e5Sespie integer_one_node, 0);
3313c87b03e5Sespie }
3314c87b03e5Sespie else if (! subset || highequal)
3315c87b03e5Sespie {
3316c87b03e5Sespie in_p = 1, low = low0;
3317c87b03e5Sespie high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3318c87b03e5Sespie integer_one_node, 0);
3319c87b03e5Sespie }
3320c87b03e5Sespie else
3321c87b03e5Sespie return 0;
3322c87b03e5Sespie }
3323c87b03e5Sespie
3324c87b03e5Sespie else if (! in0_p && in1_p)
3325c87b03e5Sespie {
3326c87b03e5Sespie /* If they don't overlap, the result is the second range. If the second
3327c87b03e5Sespie is a subset of the first, the result is false. Otherwise,
3328c87b03e5Sespie the range starts just after the first range and ends at the
3329c87b03e5Sespie end of the second. */
3330c87b03e5Sespie if (no_overlap)
3331c87b03e5Sespie in_p = 1, low = low1, high = high1;
3332c87b03e5Sespie else if (subset || highequal)
3333c87b03e5Sespie in_p = 0, low = high = 0;
3334c87b03e5Sespie else
3335c87b03e5Sespie {
3336c87b03e5Sespie in_p = 1, high = high1;
3337c87b03e5Sespie low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3338c87b03e5Sespie integer_one_node, 0);
3339c87b03e5Sespie }
3340c87b03e5Sespie }
3341c87b03e5Sespie
3342c87b03e5Sespie else
3343c87b03e5Sespie {
3344c87b03e5Sespie /* The case where we are excluding both ranges. Here the complex case
3345c87b03e5Sespie is if they don't overlap. In that case, the only time we have a
3346c87b03e5Sespie range is if they are adjacent. If the second is a subset of the
3347c87b03e5Sespie first, the result is the first. Otherwise, the range to exclude
3348c87b03e5Sespie starts at the beginning of the first range and ends at the end of the
3349c87b03e5Sespie second. */
3350c87b03e5Sespie if (no_overlap)
3351c87b03e5Sespie {
3352c87b03e5Sespie if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3353c87b03e5Sespie range_binop (PLUS_EXPR, NULL_TREE,
3354c87b03e5Sespie high0, 1,
3355c87b03e5Sespie integer_one_node, 1),
3356c87b03e5Sespie 1, low1, 0)))
3357c87b03e5Sespie in_p = 0, low = low0, high = high1;
3358c87b03e5Sespie else
3359c87b03e5Sespie return 0;
3360c87b03e5Sespie }
3361c87b03e5Sespie else if (subset)
3362c87b03e5Sespie in_p = 0, low = low0, high = high0;
3363c87b03e5Sespie else
3364c87b03e5Sespie in_p = 0, low = low0, high = high1;
3365c87b03e5Sespie }
3366c87b03e5Sespie
3367c87b03e5Sespie *pin_p = in_p, *plow = low, *phigh = high;
3368c87b03e5Sespie return 1;
3369c87b03e5Sespie }
3370c87b03e5Sespie
3371c87b03e5Sespie /* EXP is some logical combination of boolean tests. See if we can
3372c87b03e5Sespie merge it into some range test. Return the new tree if so. */
3373c87b03e5Sespie
3374c87b03e5Sespie static tree
3375c87b03e5Sespie fold_range_test (exp)
3376c87b03e5Sespie tree exp;
3377c87b03e5Sespie {
3378c87b03e5Sespie int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3379c87b03e5Sespie || TREE_CODE (exp) == TRUTH_OR_EXPR);
3380c87b03e5Sespie int in0_p, in1_p, in_p;
3381c87b03e5Sespie tree low0, low1, low, high0, high1, high;
3382c87b03e5Sespie tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3383c87b03e5Sespie tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3384c87b03e5Sespie tree tem;
3385c87b03e5Sespie
3386c87b03e5Sespie /* If this is an OR operation, invert both sides; we will invert
3387c87b03e5Sespie again at the end. */
3388c87b03e5Sespie if (or_op)
3389c87b03e5Sespie in0_p = ! in0_p, in1_p = ! in1_p;
3390c87b03e5Sespie
3391c87b03e5Sespie /* If both expressions are the same, if we can merge the ranges, and we
3392c87b03e5Sespie can build the range test, return it or it inverted. If one of the
3393c87b03e5Sespie ranges is always true or always false, consider it to be the same
3394c87b03e5Sespie expression as the other. */
3395c87b03e5Sespie if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3396c87b03e5Sespie && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3397c87b03e5Sespie in1_p, low1, high1)
3398c87b03e5Sespie && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3399c87b03e5Sespie lhs != 0 ? lhs
3400c87b03e5Sespie : rhs != 0 ? rhs : integer_zero_node,
3401c87b03e5Sespie in_p, low, high))))
3402c87b03e5Sespie return or_op ? invert_truthvalue (tem) : tem;
3403c87b03e5Sespie
3404c87b03e5Sespie /* On machines where the branch cost is expensive, if this is a
3405c87b03e5Sespie short-circuited branch and the underlying object on both sides
3406c87b03e5Sespie is the same, make a non-short-circuit operation. */
3407c87b03e5Sespie else if (BRANCH_COST >= 2
3408c87b03e5Sespie && lhs != 0 && rhs != 0
3409c87b03e5Sespie && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3410c87b03e5Sespie || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3411c87b03e5Sespie && operand_equal_p (lhs, rhs, 0))
3412c87b03e5Sespie {
3413c87b03e5Sespie /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3414c87b03e5Sespie unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3415c87b03e5Sespie which cases we can't do this. */
3416c87b03e5Sespie if (simple_operand_p (lhs))
3417c87b03e5Sespie return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3418c87b03e5Sespie ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3419c87b03e5Sespie TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3420c87b03e5Sespie TREE_OPERAND (exp, 1));
3421c87b03e5Sespie
3422c87b03e5Sespie else if ((*lang_hooks.decls.global_bindings_p) () == 0
3423c87b03e5Sespie && ! contains_placeholder_p (lhs))
3424c87b03e5Sespie {
3425c87b03e5Sespie tree common = save_expr (lhs);
3426c87b03e5Sespie
3427c87b03e5Sespie if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3428c87b03e5Sespie or_op ? ! in0_p : in0_p,
3429c87b03e5Sespie low0, high0))
3430c87b03e5Sespie && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3431c87b03e5Sespie or_op ? ! in1_p : in1_p,
3432c87b03e5Sespie low1, high1))))
3433c87b03e5Sespie return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3434c87b03e5Sespie ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3435c87b03e5Sespie TREE_TYPE (exp), lhs, rhs);
3436c87b03e5Sespie }
3437c87b03e5Sespie }
3438c87b03e5Sespie
3439c87b03e5Sespie return 0;
3440c87b03e5Sespie }
3441c87b03e5Sespie
3442c87b03e5Sespie /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3443c87b03e5Sespie bit value. Arrange things so the extra bits will be set to zero if and
3444c87b03e5Sespie only if C is sign-extended to its full width. If MASK is nonzero,
3445c87b03e5Sespie it is an INTEGER_CST that should be AND'ed with the extra bits. */
3446c87b03e5Sespie
3447c87b03e5Sespie static tree
3448c87b03e5Sespie unextend (c, p, unsignedp, mask)
3449c87b03e5Sespie tree c;
3450c87b03e5Sespie int p;
3451c87b03e5Sespie int unsignedp;
3452c87b03e5Sespie tree mask;
3453c87b03e5Sespie {
3454c87b03e5Sespie tree type = TREE_TYPE (c);
3455c87b03e5Sespie int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3456c87b03e5Sespie tree temp;
3457c87b03e5Sespie
3458c87b03e5Sespie if (p == modesize || unsignedp)
3459c87b03e5Sespie return c;
3460c87b03e5Sespie
3461c87b03e5Sespie /* We work by getting just the sign bit into the low-order bit, then
3462c87b03e5Sespie into the high-order bit, then sign-extend. We then XOR that value
3463c87b03e5Sespie with C. */
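  /* Worked example (editor's note): take P == 4, MODESIZE == 8 and the
     4-bit value 1010 (sign bit set).  If C arrives sign-extended,
     C == 1111 1010: the sign bit is isolated in bit 0 by the two
     const_binop calls below, moved up to bit 7 (1000 0000), and the
     arithmetic shift right by MODESIZE - P - 1 == 3 smears it into
     1111 0000; XORing with C clears the high bits, giving 0000 1010.
     If C had arrived zero-extended instead, the same XOR would set the
     high bits, so the extra bits end up zero exactly when C was
     sign-extended.  */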
3464c87b03e5Sespie temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3465c87b03e5Sespie temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3466c87b03e5Sespie
3467c87b03e5Sespie /* We must use a signed type in order to get an arithmetic right shift.
3468c87b03e5Sespie However, we must also avoid introducing accidental overflows, so that
3469c87b03e5Sespie a subsequent call to integer_zerop will work. Hence we must
3470c87b03e5Sespie do the type conversion here. At this point, the constant is either
3471c87b03e5Sespie zero or one, and the conversion to a signed type can never overflow.
3472c87b03e5Sespie We could get an overflow if this conversion is done anywhere else. */
3473c87b03e5Sespie if (TREE_UNSIGNED (type))
3474c87b03e5Sespie temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3475c87b03e5Sespie
3476c87b03e5Sespie temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3477c87b03e5Sespie temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3478c87b03e5Sespie if (mask != 0)
3479c87b03e5Sespie temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3480c87b03e5Sespie /* If necessary, convert the type back to match the type of C. */
3481c87b03e5Sespie if (TREE_UNSIGNED (type))
3482c87b03e5Sespie temp = convert (type, temp);
3483c87b03e5Sespie
3484c87b03e5Sespie return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3485c87b03e5Sespie }
3486c87b03e5Sespie
3487c87b03e5Sespie /* Find ways of folding logical expressions of LHS and RHS:
3488c87b03e5Sespie Try to merge two comparisons to the same innermost item.
3489c87b03e5Sespie Look for range tests like "ch >= '0' && ch <= '9'".
3490c87b03e5Sespie Look for combinations of simple terms on machines with expensive branches
3491c87b03e5Sespie and evaluate the RHS unconditionally.
3492c87b03e5Sespie
3493c87b03e5Sespie For example, if we have p->a == 2 && p->b == 4 and we can make an
3494c87b03e5Sespie object large enough to span both A and B, we can do this with a comparison
3495c87b03e5Sespie against the object ANDed with a mask.
3496c87b03e5Sespie
3497c87b03e5Sespie If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3498c87b03e5Sespie operations to do this with one comparison.
3499c87b03e5Sespie
3500c87b03e5Sespie We check for both normal comparisons and the BIT_AND_EXPRs made by this
3501c87b03e5Sespie function and the one above.
3502c87b03e5Sespie
3503c87b03e5Sespie CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3504c87b03e5Sespie TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3505c87b03e5Sespie
3506c87b03e5Sespie TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3507c87b03e5Sespie two operands.
3508c87b03e5Sespie
3509c87b03e5Sespie We return the simplified tree or 0 if no optimization is possible. */
3510c87b03e5Sespie
3511c87b03e5Sespie static tree
3512c87b03e5Sespie fold_truthop (code, truth_type, lhs, rhs)
3513c87b03e5Sespie enum tree_code code;
3514c87b03e5Sespie tree truth_type, lhs, rhs;
3515c87b03e5Sespie {
3516c87b03e5Sespie /* If this is the "or" of two comparisons, we can do something if
3517c87b03e5Sespie the comparisons are NE_EXPR. If this is the "and", we can do something
3518c87b03e5Sespie if the comparisons are EQ_EXPR. I.e.,
3519c87b03e5Sespie (a->b == 2 && a->c == 4) can become (a->new == NEW).
3520c87b03e5Sespie
3521c87b03e5Sespie WANTED_CODE is this operation code. For single bit fields, we can
3522c87b03e5Sespie convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3523c87b03e5Sespie comparison for one-bit fields. */
3524c87b03e5Sespie
3525c87b03e5Sespie enum tree_code wanted_code;
3526c87b03e5Sespie enum tree_code lcode, rcode;
3527c87b03e5Sespie tree ll_arg, lr_arg, rl_arg, rr_arg;
3528c87b03e5Sespie tree ll_inner, lr_inner, rl_inner, rr_inner;
3529c87b03e5Sespie HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3530c87b03e5Sespie HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3531c87b03e5Sespie HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3532c87b03e5Sespie HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3533c87b03e5Sespie int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3534c87b03e5Sespie enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3535c87b03e5Sespie enum machine_mode lnmode, rnmode;
3536c87b03e5Sespie tree ll_mask, lr_mask, rl_mask, rr_mask;
3537c87b03e5Sespie tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3538c87b03e5Sespie tree l_const, r_const;
3539c87b03e5Sespie tree lntype, rntype, result;
3540c87b03e5Sespie int first_bit, end_bit;
3541c87b03e5Sespie int volatilep;
3542c87b03e5Sespie
3543c87b03e5Sespie /* Start by getting the comparison codes. Fail if anything is volatile.
3544c87b03e5Sespie If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3545c87b03e5Sespie it were surrounded with a NE_EXPR. */
3546c87b03e5Sespie
3547c87b03e5Sespie if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3548c87b03e5Sespie return 0;
3549c87b03e5Sespie
3550c87b03e5Sespie lcode = TREE_CODE (lhs);
3551c87b03e5Sespie rcode = TREE_CODE (rhs);
3552c87b03e5Sespie
3553c87b03e5Sespie if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3554c87b03e5Sespie lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3555c87b03e5Sespie
3556c87b03e5Sespie if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3557c87b03e5Sespie rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3558c87b03e5Sespie
3559c87b03e5Sespie if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3560c87b03e5Sespie return 0;
3561c87b03e5Sespie
3562c87b03e5Sespie code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3563c87b03e5Sespie ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3564c87b03e5Sespie
3565c87b03e5Sespie ll_arg = TREE_OPERAND (lhs, 0);
3566c87b03e5Sespie lr_arg = TREE_OPERAND (lhs, 1);
3567c87b03e5Sespie rl_arg = TREE_OPERAND (rhs, 0);
3568c87b03e5Sespie rr_arg = TREE_OPERAND (rhs, 1);
3569c87b03e5Sespie
3570c87b03e5Sespie /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
3571c87b03e5Sespie if (simple_operand_p (ll_arg)
3572c87b03e5Sespie && simple_operand_p (lr_arg)
3573c87b03e5Sespie && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3574c87b03e5Sespie {
3575c87b03e5Sespie int compcode;
3576c87b03e5Sespie
3577c87b03e5Sespie if (operand_equal_p (ll_arg, rl_arg, 0)
3578c87b03e5Sespie && operand_equal_p (lr_arg, rr_arg, 0))
3579c87b03e5Sespie {
3580c87b03e5Sespie int lcompcode, rcompcode;
3581c87b03e5Sespie
3582c87b03e5Sespie lcompcode = comparison_to_compcode (lcode);
3583c87b03e5Sespie rcompcode = comparison_to_compcode (rcode);
3584c87b03e5Sespie compcode = (code == TRUTH_AND_EXPR)
3585c87b03e5Sespie ? lcompcode & rcompcode
3586c87b03e5Sespie : lcompcode | rcompcode;
3587c87b03e5Sespie }
3588c87b03e5Sespie else if (operand_equal_p (ll_arg, rr_arg, 0)
3589c87b03e5Sespie && operand_equal_p (lr_arg, rl_arg, 0))
3590c87b03e5Sespie {
3591c87b03e5Sespie int lcompcode, rcompcode;
3592c87b03e5Sespie
3593c87b03e5Sespie rcode = swap_tree_comparison (rcode);
3594c87b03e5Sespie lcompcode = comparison_to_compcode (lcode);
3595c87b03e5Sespie rcompcode = comparison_to_compcode (rcode);
3596c87b03e5Sespie compcode = (code == TRUTH_AND_EXPR)
3597c87b03e5Sespie ? lcompcode & rcompcode
3598c87b03e5Sespie : lcompcode | rcompcode;
3599c87b03e5Sespie }
3600c87b03e5Sespie else
3601c87b03e5Sespie compcode = -1;
3602c87b03e5Sespie
3603c87b03e5Sespie if (compcode == COMPCODE_TRUE)
3604c87b03e5Sespie return convert (truth_type, integer_one_node);
3605c87b03e5Sespie else if (compcode == COMPCODE_FALSE)
3606c87b03e5Sespie return convert (truth_type, integer_zero_node);
3607c87b03e5Sespie else if (compcode != -1)
3608c87b03e5Sespie return build (compcode_to_comparison (compcode),
3609c87b03e5Sespie truth_type, ll_arg, lr_arg);
3610c87b03e5Sespie }
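  /* Example (editor's note): for (x < y) || (x == y) both comparisons name
     the same operands, so the codes for LT and EQ are OR'ed together;
     assuming the compcode encoding gives LT, EQ and GT separate bits, the
     result is the code for LE and the whole expression folds to x <= y.
     Under && the same pair AND's to COMPCODE_FALSE and folds to constant
     zero.  */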
3611c87b03e5Sespie
3612c87b03e5Sespie /* If the RHS can be evaluated unconditionally and its operands are
3613c87b03e5Sespie simple, it wins to evaluate the RHS unconditionally on machines
3614c87b03e5Sespie with expensive branches. In this case, this isn't a comparison
3615c87b03e5Sespie that can be merged. Avoid doing this if the RHS is a floating-point
3616c87b03e5Sespie comparison since those can trap. */
3617c87b03e5Sespie
3618c87b03e5Sespie if (BRANCH_COST >= 2
3619c87b03e5Sespie && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3620c87b03e5Sespie && simple_operand_p (rl_arg)
3621c87b03e5Sespie && simple_operand_p (rr_arg))
3622c87b03e5Sespie {
3623c87b03e5Sespie /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3624c87b03e5Sespie if (code == TRUTH_OR_EXPR
3625c87b03e5Sespie && lcode == NE_EXPR && integer_zerop (lr_arg)
3626c87b03e5Sespie && rcode == NE_EXPR && integer_zerop (rr_arg)
3627c87b03e5Sespie && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3628c87b03e5Sespie return build (NE_EXPR, truth_type,
3629c87b03e5Sespie build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3630c87b03e5Sespie ll_arg, rl_arg),
3631c87b03e5Sespie integer_zero_node);
3632c87b03e5Sespie
3633c87b03e5Sespie /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3634c87b03e5Sespie if (code == TRUTH_AND_EXPR
3635c87b03e5Sespie && lcode == EQ_EXPR && integer_zerop (lr_arg)
3636c87b03e5Sespie && rcode == EQ_EXPR && integer_zerop (rr_arg)
3637c87b03e5Sespie && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3638c87b03e5Sespie return build (EQ_EXPR, truth_type,
3639c87b03e5Sespie build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3640c87b03e5Sespie ll_arg, rl_arg),
3641c87b03e5Sespie integer_zero_node);
3642c87b03e5Sespie
3643c87b03e5Sespie return build (code, truth_type, lhs, rhs);
3644c87b03e5Sespie }
3645c87b03e5Sespie
3646c87b03e5Sespie /* See if the comparisons can be merged. Then get all the parameters for
3647c87b03e5Sespie each side. */
3648c87b03e5Sespie
3649c87b03e5Sespie if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3650c87b03e5Sespie || (rcode != EQ_EXPR && rcode != NE_EXPR))
3651c87b03e5Sespie return 0;
3652c87b03e5Sespie
3653c87b03e5Sespie volatilep = 0;
3654c87b03e5Sespie ll_inner = decode_field_reference (ll_arg,
3655c87b03e5Sespie &ll_bitsize, &ll_bitpos, &ll_mode,
3656c87b03e5Sespie &ll_unsignedp, &volatilep, &ll_mask,
3657c87b03e5Sespie &ll_and_mask);
3658c87b03e5Sespie lr_inner = decode_field_reference (lr_arg,
3659c87b03e5Sespie &lr_bitsize, &lr_bitpos, &lr_mode,
3660c87b03e5Sespie &lr_unsignedp, &volatilep, &lr_mask,
3661c87b03e5Sespie &lr_and_mask);
3662c87b03e5Sespie rl_inner = decode_field_reference (rl_arg,
3663c87b03e5Sespie &rl_bitsize, &rl_bitpos, &rl_mode,
3664c87b03e5Sespie &rl_unsignedp, &volatilep, &rl_mask,
3665c87b03e5Sespie &rl_and_mask);
3666c87b03e5Sespie rr_inner = decode_field_reference (rr_arg,
3667c87b03e5Sespie &rr_bitsize, &rr_bitpos, &rr_mode,
3668c87b03e5Sespie &rr_unsignedp, &volatilep, &rr_mask,
3669c87b03e5Sespie &rr_and_mask);
3670c87b03e5Sespie
3671c87b03e5Sespie /* The inner operation on the lhs of each comparison must be the
3672c87b03e5Sespie same if we are to be able to do anything.
3673c87b03e5Sespie Then see if we have constants. If not, the same must be true for
3674c87b03e5Sespie the rhs's. */
3675c87b03e5Sespie if (volatilep || ll_inner == 0 || rl_inner == 0
3676c87b03e5Sespie || ! operand_equal_p (ll_inner, rl_inner, 0))
3677c87b03e5Sespie return 0;
3678c87b03e5Sespie
3679c87b03e5Sespie if (TREE_CODE (lr_arg) == INTEGER_CST
3680c87b03e5Sespie && TREE_CODE (rr_arg) == INTEGER_CST)
3681c87b03e5Sespie l_const = lr_arg, r_const = rr_arg;
3682c87b03e5Sespie else if (lr_inner == 0 || rr_inner == 0
3683c87b03e5Sespie || ! operand_equal_p (lr_inner, rr_inner, 0))
3684c87b03e5Sespie return 0;
3685c87b03e5Sespie else
3686c87b03e5Sespie l_const = r_const = 0;
3687c87b03e5Sespie
3688c87b03e5Sespie /* If either comparison code is not correct for our logical operation,
3689c87b03e5Sespie fail. However, we can convert a one-bit comparison against zero into
3690c87b03e5Sespie the opposite comparison against that bit being set in the field. */
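  /* For example, with "&&" the wanted code is EQ_EXPR, so a left-hand
     test such as (x & 8) != 0 is handled below as if it were
     (x & 8) == 8, which can then be merged with the right-hand side.  */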
3691c87b03e5Sespie
3692c87b03e5Sespie wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3693c87b03e5Sespie if (lcode != wanted_code)
3694c87b03e5Sespie {
3695c87b03e5Sespie if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3696c87b03e5Sespie {
3697c87b03e5Sespie /* Make the left operand unsigned, since we are only interested
3698c87b03e5Sespie in the value of one bit. Otherwise we are doing the wrong
3699c87b03e5Sespie thing below. */
3700c87b03e5Sespie ll_unsignedp = 1;
3701c87b03e5Sespie l_const = ll_mask;
3702c87b03e5Sespie }
3703c87b03e5Sespie else
3704c87b03e5Sespie return 0;
3705c87b03e5Sespie }
3706c87b03e5Sespie
3707c87b03e5Sespie /* This is analogous to the code for l_const above. */
3708c87b03e5Sespie if (rcode != wanted_code)
3709c87b03e5Sespie {
3710c87b03e5Sespie if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3711c87b03e5Sespie {
3712c87b03e5Sespie rl_unsignedp = 1;
3713c87b03e5Sespie r_const = rl_mask;
3714c87b03e5Sespie }
3715c87b03e5Sespie else
3716c87b03e5Sespie return 0;
3717c87b03e5Sespie }
3718c87b03e5Sespie
3719c87b03e5Sespie /* After this point all optimizations will generate bit-field
3720c87b03e5Sespie references, which we might not want. */
3721c87b03e5Sespie if (! (*lang_hooks.can_use_bit_fields_p) ())
3722c87b03e5Sespie return 0;
3723c87b03e5Sespie
3724c87b03e5Sespie /* See if we can find a mode that contains both fields being compared on
3725c87b03e5Sespie the left. If we can't, fail. Otherwise, update all constants and masks
3726c87b03e5Sespie to be relative to a field of that size. */
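  /* For example, two 3-bit fields at bit positions 2 and 9 of the same
     word straddle a byte boundary, so, alignment permitting, the
     narrowest usable mode here is a 16-bit one (HImode).  */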
3727c87b03e5Sespie first_bit = MIN (ll_bitpos, rl_bitpos);
3728c87b03e5Sespie end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3729c87b03e5Sespie lnmode = get_best_mode (end_bit - first_bit, first_bit,
3730c87b03e5Sespie TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3731c87b03e5Sespie volatilep);
3732c87b03e5Sespie if (lnmode == VOIDmode)
3733c87b03e5Sespie return 0;
3734c87b03e5Sespie
3735c87b03e5Sespie lnbitsize = GET_MODE_BITSIZE (lnmode);
3736c87b03e5Sespie lnbitpos = first_bit & ~ (lnbitsize - 1);
3737c87b03e5Sespie lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3738c87b03e5Sespie xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3739c87b03e5Sespie
3740c87b03e5Sespie if (BYTES_BIG_ENDIAN)
3741c87b03e5Sespie {
3742c87b03e5Sespie xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3743c87b03e5Sespie xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3744c87b03e5Sespie }
3745c87b03e5Sespie
3746c87b03e5Sespie ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3747c87b03e5Sespie size_int (xll_bitpos), 0);
3748c87b03e5Sespie rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3749c87b03e5Sespie size_int (xrl_bitpos), 0);
3750c87b03e5Sespie
3751c87b03e5Sespie if (l_const)
3752c87b03e5Sespie {
3753c87b03e5Sespie l_const = convert (lntype, l_const);
3754c87b03e5Sespie l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3755c87b03e5Sespie l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3756c87b03e5Sespie if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3757c87b03e5Sespie fold (build1 (BIT_NOT_EXPR,
3758c87b03e5Sespie lntype, ll_mask)),
3759c87b03e5Sespie 0)))
3760c87b03e5Sespie {
3761c87b03e5Sespie warning ("comparison is always %d", wanted_code == NE_EXPR);
3762c87b03e5Sespie
3763c87b03e5Sespie return convert (truth_type,
3764c87b03e5Sespie wanted_code == NE_EXPR
3765c87b03e5Sespie ? integer_one_node : integer_zero_node);
3766c87b03e5Sespie }
3767c87b03e5Sespie }
3768c87b03e5Sespie if (r_const)
3769c87b03e5Sespie {
3770c87b03e5Sespie r_const = convert (lntype, r_const);
3771c87b03e5Sespie r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3772c87b03e5Sespie r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3773c87b03e5Sespie if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3774c87b03e5Sespie fold (build1 (BIT_NOT_EXPR,
3775c87b03e5Sespie lntype, rl_mask)),
3776c87b03e5Sespie 0)))
3777c87b03e5Sespie {
3778c87b03e5Sespie warning ("comparison is always %d", wanted_code == NE_EXPR);
3779c87b03e5Sespie
3780c87b03e5Sespie return convert (truth_type,
3781c87b03e5Sespie wanted_code == NE_EXPR
3782c87b03e5Sespie ? integer_one_node : integer_zero_node);
3783c87b03e5Sespie }
3784c87b03e5Sespie }
3785c87b03e5Sespie
3786c87b03e5Sespie /* If the right sides are not constant, do the same for them. Also,
3787c87b03e5Sespie disallow this optimization if a size or signedness mismatch occurs
3788c87b03e5Sespie between the left and right sides. */
3789c87b03e5Sespie if (l_const == 0)
3790c87b03e5Sespie {
3791c87b03e5Sespie if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3792c87b03e5Sespie || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3793c87b03e5Sespie /* Make sure the two fields on the right
3794c87b03e5Sespie correspond to the left without being swapped. */
3795c87b03e5Sespie || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3796c87b03e5Sespie return 0;
3797c87b03e5Sespie
3798c87b03e5Sespie first_bit = MIN (lr_bitpos, rr_bitpos);
3799c87b03e5Sespie end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3800c87b03e5Sespie rnmode = get_best_mode (end_bit - first_bit, first_bit,
3801c87b03e5Sespie TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3802c87b03e5Sespie volatilep);
3803c87b03e5Sespie if (rnmode == VOIDmode)
3804c87b03e5Sespie return 0;
3805c87b03e5Sespie
3806c87b03e5Sespie rnbitsize = GET_MODE_BITSIZE (rnmode);
3807c87b03e5Sespie rnbitpos = first_bit & ~ (rnbitsize - 1);
3808c87b03e5Sespie rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3809c87b03e5Sespie xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3810c87b03e5Sespie
3811c87b03e5Sespie if (BYTES_BIG_ENDIAN)
3812c87b03e5Sespie {
3813c87b03e5Sespie xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3814c87b03e5Sespie xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3815c87b03e5Sespie }
3816c87b03e5Sespie
3817c87b03e5Sespie lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3818c87b03e5Sespie size_int (xlr_bitpos), 0);
3819c87b03e5Sespie rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3820c87b03e5Sespie size_int (xrr_bitpos), 0);
3821c87b03e5Sespie
3822c87b03e5Sespie /* Make a mask that corresponds to both fields being compared.
3823c87b03e5Sespie Do this for both items being compared. If the operands are the
3824c87b03e5Sespie same size and the bits being compared are in the same position
3825c87b03e5Sespie then we can do this by masking both and comparing the masked
3826c87b03e5Sespie results. */
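  /* For instance, (p->a == q->a) && (p->b == q->b), where a and b are
     bit-fields sharing a word on each side, can be done as one masked
     comparison of the two containing words.  */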
3827c87b03e5Sespie ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3828c87b03e5Sespie lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3829c87b03e5Sespie if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3830c87b03e5Sespie {
3831c87b03e5Sespie lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3832c87b03e5Sespie ll_unsignedp || rl_unsignedp);
3833c87b03e5Sespie if (! all_ones_mask_p (ll_mask, lnbitsize))
3834c87b03e5Sespie lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3835c87b03e5Sespie
3836c87b03e5Sespie rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3837c87b03e5Sespie lr_unsignedp || rr_unsignedp);
3838c87b03e5Sespie if (! all_ones_mask_p (lr_mask, rnbitsize))
3839c87b03e5Sespie rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3840c87b03e5Sespie
3841c87b03e5Sespie return build (wanted_code, truth_type, lhs, rhs);
3842c87b03e5Sespie }
3843c87b03e5Sespie
3844c87b03e5Sespie /* There is still another way we can do something: If both pairs of
3845c87b03e5Sespie fields being compared are adjacent, we may be able to make a wider
3846c87b03e5Sespie field containing them both.
3847c87b03e5Sespie
3848c87b03e5Sespie Note that we still must mask the lhs/rhs expressions. Furthermore,
3849c87b03e5Sespie the mask must be shifted to account for the shift done by
3850c87b03e5Sespie make_bit_field_ref. */
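  /* For instance, two adjacent bit-fields of widths 3 and 5 compared
     against another such adjacent pair can be fetched as a single
     8-bit field on each side and compared directly.  */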
3851c87b03e5Sespie if ((ll_bitsize + ll_bitpos == rl_bitpos
3852c87b03e5Sespie && lr_bitsize + lr_bitpos == rr_bitpos)
3853c87b03e5Sespie || (ll_bitpos == rl_bitpos + rl_bitsize
3854c87b03e5Sespie && lr_bitpos == rr_bitpos + rr_bitsize))
3855c87b03e5Sespie {
3856c87b03e5Sespie tree type;
3857c87b03e5Sespie
3858c87b03e5Sespie lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3859c87b03e5Sespie MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3860c87b03e5Sespie rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3861c87b03e5Sespie MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3862c87b03e5Sespie
3863c87b03e5Sespie ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3864c87b03e5Sespie size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3865c87b03e5Sespie lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3866c87b03e5Sespie size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3867c87b03e5Sespie
3868c87b03e5Sespie /* Convert to the smaller type before masking out unwanted bits. */
3869c87b03e5Sespie type = lntype;
3870c87b03e5Sespie if (lntype != rntype)
3871c87b03e5Sespie {
3872c87b03e5Sespie if (lnbitsize > rnbitsize)
3873c87b03e5Sespie {
3874c87b03e5Sespie lhs = convert (rntype, lhs);
3875c87b03e5Sespie ll_mask = convert (rntype, ll_mask);
3876c87b03e5Sespie type = rntype;
3877c87b03e5Sespie }
3878c87b03e5Sespie else if (lnbitsize < rnbitsize)
3879c87b03e5Sespie {
3880c87b03e5Sespie rhs = convert (lntype, rhs);
3881c87b03e5Sespie lr_mask = convert (lntype, lr_mask);
3882c87b03e5Sespie type = lntype;
3883c87b03e5Sespie }
3884c87b03e5Sespie }
3885c87b03e5Sespie
3886c87b03e5Sespie if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3887c87b03e5Sespie lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3888c87b03e5Sespie
3889c87b03e5Sespie if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3890c87b03e5Sespie rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3891c87b03e5Sespie
3892c87b03e5Sespie return build (wanted_code, truth_type, lhs, rhs);
3893c87b03e5Sespie }
3894c87b03e5Sespie
3895c87b03e5Sespie return 0;
3896c87b03e5Sespie }
3897c87b03e5Sespie
3898c87b03e5Sespie /* Handle the case of comparisons with constants. If there is something in
3899c87b03e5Sespie common between the masks, those bits of the constants must be the same.
3900c87b03e5Sespie If not, the condition is always false. Test for this to avoid generating
3901c87b03e5Sespie incorrect code below. */
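  /* For example, (x & 3) == 1 and (x & 5) == 4 can never both hold:
     the masks share bit 0 but the two constants disagree there, so the
     "and" of such tests is always 0 and the "or" of the corresponding
     not-equal tests is always 1.  */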
3902c87b03e5Sespie result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3903c87b03e5Sespie if (! integer_zerop (result)
3904c87b03e5Sespie && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3905c87b03e5Sespie const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3906c87b03e5Sespie {
3907c87b03e5Sespie if (wanted_code == NE_EXPR)
3908c87b03e5Sespie {
3909c87b03e5Sespie warning ("`or' of unmatched not-equal tests is always 1");
3910c87b03e5Sespie return convert (truth_type, integer_one_node);
3911c87b03e5Sespie }
3912c87b03e5Sespie else
3913c87b03e5Sespie {
3914c87b03e5Sespie warning ("`and' of mutually exclusive equal-tests is always 0");
3915c87b03e5Sespie return convert (truth_type, integer_zero_node);
3916c87b03e5Sespie }
3917c87b03e5Sespie }
3918c87b03e5Sespie
3919c87b03e5Sespie /* Construct the expression we will return. First get the component
3920c87b03e5Sespie reference we will make. Unless the mask is all ones the width of
3921c87b03e5Sespie that field, perform the mask operation. Then compare with the
3922c87b03e5Sespie merged constant. */
3923c87b03e5Sespie result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3924c87b03e5Sespie ll_unsignedp || rl_unsignedp);
3925c87b03e5Sespie
3926c87b03e5Sespie ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3927c87b03e5Sespie if (! all_ones_mask_p (ll_mask, lnbitsize))
3928c87b03e5Sespie result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3929c87b03e5Sespie
3930c87b03e5Sespie return build (wanted_code, truth_type, result,
3931c87b03e5Sespie const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3932c87b03e5Sespie }
3933c87b03e5Sespie
3934c87b03e5Sespie /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3935c87b03e5Sespie constant. */
3936c87b03e5Sespie
3937c87b03e5Sespie static tree
3938c87b03e5Sespie optimize_minmax_comparison (t)
3939c87b03e5Sespie tree t;
3940c87b03e5Sespie {
3941c87b03e5Sespie tree type = TREE_TYPE (t);
3942c87b03e5Sespie tree arg0 = TREE_OPERAND (t, 0);
3943c87b03e5Sespie enum tree_code op_code;
3944c87b03e5Sespie tree comp_const = TREE_OPERAND (t, 1);
3945c87b03e5Sespie tree minmax_const;
3946c87b03e5Sespie int consts_equal, consts_lt;
3947c87b03e5Sespie tree inner;
3948c87b03e5Sespie
3949c87b03e5Sespie STRIP_SIGN_NOPS (arg0);
3950c87b03e5Sespie
3951c87b03e5Sespie op_code = TREE_CODE (arg0);
3952c87b03e5Sespie minmax_const = TREE_OPERAND (arg0, 1);
3953c87b03e5Sespie consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3954c87b03e5Sespie consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3955c87b03e5Sespie inner = TREE_OPERAND (arg0, 0);
3956c87b03e5Sespie
3957c87b03e5Sespie /* If something does not permit us to optimize, return the original tree. */
3958c87b03e5Sespie if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3959c87b03e5Sespie || TREE_CODE (comp_const) != INTEGER_CST
3960c87b03e5Sespie || TREE_CONSTANT_OVERFLOW (comp_const)
3961c87b03e5Sespie || TREE_CODE (minmax_const) != INTEGER_CST
3962c87b03e5Sespie || TREE_CONSTANT_OVERFLOW (minmax_const))
3963c87b03e5Sespie return t;
3964c87b03e5Sespie
3965c87b03e5Sespie /* Now handle all the various comparison codes. We only handle EQ_EXPR
3966c87b03e5Sespie and GT_EXPR, doing the rest with recursive calls using logical
3967c87b03e5Sespie simplifications. */
3968c87b03e5Sespie switch (TREE_CODE (t))
3969c87b03e5Sespie {
3970c87b03e5Sespie case NE_EXPR: case LT_EXPR: case LE_EXPR:
3971c87b03e5Sespie return
3972c87b03e5Sespie invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3973c87b03e5Sespie
3974c87b03e5Sespie case GE_EXPR:
3975c87b03e5Sespie return
3976c87b03e5Sespie fold (build (TRUTH_ORIF_EXPR, type,
3977c87b03e5Sespie optimize_minmax_comparison
3978c87b03e5Sespie (build (EQ_EXPR, type, arg0, comp_const)),
3979c87b03e5Sespie optimize_minmax_comparison
3980c87b03e5Sespie (build (GT_EXPR, type, arg0, comp_const))));
3981c87b03e5Sespie
3982c87b03e5Sespie case EQ_EXPR:
3983c87b03e5Sespie if (op_code == MAX_EXPR && consts_equal)
3984c87b03e5Sespie /* MAX (X, 0) == 0 -> X <= 0 */
3985c87b03e5Sespie return fold (build (LE_EXPR, type, inner, comp_const));
3986c87b03e5Sespie
3987c87b03e5Sespie else if (op_code == MAX_EXPR && consts_lt)
3988c87b03e5Sespie /* MAX (X, 0) == 5 -> X == 5 */
3989c87b03e5Sespie return fold (build (EQ_EXPR, type, inner, comp_const));
3990c87b03e5Sespie
3991c87b03e5Sespie else if (op_code == MAX_EXPR)
3992c87b03e5Sespie /* MAX (X, 0) == -1 -> false */
3993c87b03e5Sespie return omit_one_operand (type, integer_zero_node, inner);
3994c87b03e5Sespie
3995c87b03e5Sespie else if (consts_equal)
3996c87b03e5Sespie /* MIN (X, 0) == 0 -> X >= 0 */
3997c87b03e5Sespie return fold (build (GE_EXPR, type, inner, comp_const));
3998c87b03e5Sespie
3999c87b03e5Sespie else if (consts_lt)
4000c87b03e5Sespie /* MIN (X, 0) == 5 -> false */
4001c87b03e5Sespie return omit_one_operand (type, integer_zero_node, inner);
4002c87b03e5Sespie
4003c87b03e5Sespie else
4004c87b03e5Sespie /* MIN (X, 0) == -1 -> X == -1 */
4005c87b03e5Sespie return fold (build (EQ_EXPR, type, inner, comp_const));
4006c87b03e5Sespie
4007c87b03e5Sespie case GT_EXPR:
4008c87b03e5Sespie if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4009c87b03e5Sespie /* MAX (X, 0) > 0 -> X > 0
4010c87b03e5Sespie MAX (X, 0) > 5 -> X > 5 */
4011c87b03e5Sespie return fold (build (GT_EXPR, type, inner, comp_const));
4012c87b03e5Sespie
4013c87b03e5Sespie else if (op_code == MAX_EXPR)
4014c87b03e5Sespie /* MAX (X, 0) > -1 -> true */
4015c87b03e5Sespie return omit_one_operand (type, integer_one_node, inner);
4016c87b03e5Sespie
4017c87b03e5Sespie else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4018c87b03e5Sespie /* MIN (X, 0) > 0 -> false
4019c87b03e5Sespie MIN (X, 0) > 5 -> false */
4020c87b03e5Sespie return omit_one_operand (type, integer_zero_node, inner);
4021c87b03e5Sespie
4022c87b03e5Sespie else
4023c87b03e5Sespie /* MIN (X, 0) > -1 -> X > -1 */
4024c87b03e5Sespie return fold (build (GT_EXPR, type, inner, comp_const));
4025c87b03e5Sespie
4026c87b03e5Sespie default:
4027c87b03e5Sespie return t;
4028c87b03e5Sespie }
4029c87b03e5Sespie }
4030c87b03e5Sespie
4031c87b03e5Sespie /* T is an integer expression that is being multiplied, divided, or taken a
4032c87b03e5Sespie modulus (CODE says which and what kind of divide or modulus) by a
4033c87b03e5Sespie constant C. See if we can eliminate that operation by folding it with
4034c87b03e5Sespie other operations already in T. WIDE_TYPE, if non-null, is a type that
4035c87b03e5Sespie should be used for the computation if wider than our type.
4036c87b03e5Sespie
4037c87b03e5Sespie For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4038c87b03e5Sespie (X * 2) + (Y * 4). We must, however, be assured that either the original
4039c87b03e5Sespie expression would not overflow or that overflow is undefined for the type
4040c87b03e5Sespie in the language in question.
4041c87b03e5Sespie
4042c87b03e5Sespie We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4043c87b03e5Sespie the machine has a multiply-accumulate insn or that this is part of an
4044c87b03e5Sespie addressing calculation.
4045c87b03e5Sespie
4046c87b03e5Sespie If we return a non-null expression, it is an equivalent form of the
4047c87b03e5Sespie original computation, but need not be in the original type. */
4048c87b03e5Sespie
4049c87b03e5Sespie static tree
4050c87b03e5Sespie extract_muldiv (t, c, code, wide_type)
4051c87b03e5Sespie tree t;
4052c87b03e5Sespie tree c;
4053c87b03e5Sespie enum tree_code code;
4054c87b03e5Sespie tree wide_type;
4055c87b03e5Sespie {
4056c87b03e5Sespie /* To avoid exponential search depth, refuse to allow recursion past
4057c87b03e5Sespie three levels. Beyond that (1) it's highly unlikely that we'll find
4058c87b03e5Sespie something interesting and (2) we've probably processed it before
4059c87b03e5Sespie when we built the inner expression. */
4060c87b03e5Sespie
4061c87b03e5Sespie static int depth;
4062c87b03e5Sespie tree ret;
4063c87b03e5Sespie
4064c87b03e5Sespie if (depth > 3)
4065c87b03e5Sespie return NULL;
4066c87b03e5Sespie
4067c87b03e5Sespie depth++;
4068c87b03e5Sespie ret = extract_muldiv_1 (t, c, code, wide_type);
4069c87b03e5Sespie depth--;
4070c87b03e5Sespie
4071c87b03e5Sespie return ret;
4072c87b03e5Sespie }
4073c87b03e5Sespie
4074c87b03e5Sespie static tree
4075c87b03e5Sespie extract_muldiv_1 (t, c, code, wide_type)
4076c87b03e5Sespie tree t;
4077c87b03e5Sespie tree c;
4078c87b03e5Sespie enum tree_code code;
4079c87b03e5Sespie tree wide_type;
4080c87b03e5Sespie {
4081c87b03e5Sespie tree type = TREE_TYPE (t);
4082c87b03e5Sespie enum tree_code tcode = TREE_CODE (t);
4083c87b03e5Sespie tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4084c87b03e5Sespie > GET_MODE_SIZE (TYPE_MODE (type)))
4085c87b03e5Sespie ? wide_type : type);
4086c87b03e5Sespie tree t1, t2;
4087c87b03e5Sespie int same_p = tcode == code;
4088c87b03e5Sespie tree op0 = NULL_TREE, op1 = NULL_TREE;
4089c87b03e5Sespie
4090c87b03e5Sespie /* Don't deal with constants of zero here; they confuse the code below. */
4091c87b03e5Sespie if (integer_zerop (c))
4092c87b03e5Sespie return NULL_TREE;
4093c87b03e5Sespie
4094c87b03e5Sespie if (TREE_CODE_CLASS (tcode) == '1')
4095c87b03e5Sespie op0 = TREE_OPERAND (t, 0);
4096c87b03e5Sespie
4097c87b03e5Sespie if (TREE_CODE_CLASS (tcode) == '2')
4098c87b03e5Sespie op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4099c87b03e5Sespie
4100c87b03e5Sespie /* Note that we need not handle conditional operations here since fold
4101c87b03e5Sespie already handles those cases. So just do arithmetic here. */
4102c87b03e5Sespie switch (tcode)
4103c87b03e5Sespie {
4104c87b03e5Sespie case INTEGER_CST:
4105c87b03e5Sespie /* For a constant, we can always simplify if we are a multiply
4106c87b03e5Sespie or (for divide and modulus) if it is a multiple of our constant. */
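      /* For example, when dividing by 4, the constant 12 simplifies to 3
         here, while 10 is not a multiple of 4 and is left for the caller
         to deal with.  */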
4107c87b03e5Sespie if (code == MULT_EXPR
4108c87b03e5Sespie || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4109c87b03e5Sespie return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4110c87b03e5Sespie break;
4111c87b03e5Sespie
4112c87b03e5Sespie case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4113c87b03e5Sespie /* If op0 is an expression ... */
4114c87b03e5Sespie if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4115c87b03e5Sespie || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4116c87b03e5Sespie || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4117c87b03e5Sespie || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4118c87b03e5Sespie /* ... and is unsigned, and its type is smaller than ctype,
4119c87b03e5Sespie then we cannot pass through as widening. */
4120c87b03e5Sespie && ((TREE_UNSIGNED (TREE_TYPE (op0))
4121c87b03e5Sespie && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4122c87b03e5Sespie && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4123c87b03e5Sespie && (GET_MODE_SIZE (TYPE_MODE (ctype))
4124c87b03e5Sespie > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4125*06dc6460Sespie /* ... or this is a truncation (t is narrower than op0),
4126*06dc6460Sespie then we cannot pass through this narrowing. */
4127*06dc6460Sespie || (GET_MODE_SIZE (TYPE_MODE (type))
4128c87b03e5Sespie < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4129c87b03e5Sespie /* ... or signedness changes for division or modulus,
4130c87b03e5Sespie then we cannot pass through this conversion. */
4131c87b03e5Sespie || (code != MULT_EXPR
4132c87b03e5Sespie && (TREE_UNSIGNED (ctype)
4133c87b03e5Sespie != TREE_UNSIGNED (TREE_TYPE (op0))))))
4134c87b03e5Sespie break;
4135c87b03e5Sespie
4136c87b03e5Sespie /* Pass the constant down and see if we can make a simplification. If
4137c87b03e5Sespie we can, replace this expression with the inner simplification for
4138c87b03e5Sespie possible later conversion to our or some other type. */
4139c87b03e5Sespie if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4140c87b03e5Sespie code == MULT_EXPR ? ctype : NULL_TREE)))
4141c87b03e5Sespie return t1;
4142c87b03e5Sespie break;
4143c87b03e5Sespie
4144c87b03e5Sespie case NEGATE_EXPR: case ABS_EXPR:
4145c87b03e5Sespie if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4146c87b03e5Sespie return fold (build1 (tcode, ctype, convert (ctype, t1)));
4147c87b03e5Sespie break;
4148c87b03e5Sespie
4149c87b03e5Sespie case MIN_EXPR: case MAX_EXPR:
4150c87b03e5Sespie /* If widening the type changes the signedness, then we can't perform
4151c87b03e5Sespie this optimization as that changes the result. */
4152c87b03e5Sespie if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4153c87b03e5Sespie break;
4154c87b03e5Sespie
4155c87b03e5Sespie /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4156c87b03e5Sespie if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4157c87b03e5Sespie && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4158c87b03e5Sespie {
4159c87b03e5Sespie if (tree_int_cst_sgn (c) < 0)
4160c87b03e5Sespie tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4161c87b03e5Sespie
4162c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, t1),
4163c87b03e5Sespie convert (ctype, t2)));
4164c87b03e5Sespie }
4165c87b03e5Sespie break;
4166c87b03e5Sespie
4167c87b03e5Sespie case WITH_RECORD_EXPR:
4168c87b03e5Sespie if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4169c87b03e5Sespie return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4170c87b03e5Sespie TREE_OPERAND (t, 1));
4171c87b03e5Sespie break;
4172c87b03e5Sespie
4173c87b03e5Sespie case LSHIFT_EXPR: case RSHIFT_EXPR:
4174c87b03e5Sespie /* If the second operand is constant, this is a multiplication
4175c87b03e5Sespie or floor division by a power of two, so we can treat it that
4176c87b03e5Sespie way unless the multiplier or divisor overflows. */
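      /* For example, X << 3 is handled as X * 8 and X >> 3 as the floor
         division X / 8.  */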
4177c87b03e5Sespie if (TREE_CODE (op1) == INTEGER_CST
4178c87b03e5Sespie /* const_binop may not detect overflow correctly,
4179c87b03e5Sespie so check for it explicitly here. */
4180c87b03e5Sespie && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4181c87b03e5Sespie && TREE_INT_CST_HIGH (op1) == 0
4182c87b03e5Sespie && 0 != (t1 = convert (ctype,
4183c87b03e5Sespie const_binop (LSHIFT_EXPR, size_one_node,
4184c87b03e5Sespie op1, 0)))
4185c87b03e5Sespie && ! TREE_OVERFLOW (t1))
4186c87b03e5Sespie return extract_muldiv (build (tcode == LSHIFT_EXPR
4187c87b03e5Sespie ? MULT_EXPR : FLOOR_DIV_EXPR,
4188c87b03e5Sespie ctype, convert (ctype, op0), t1),
4189c87b03e5Sespie c, code, wide_type);
4190c87b03e5Sespie break;
4191c87b03e5Sespie
4192c87b03e5Sespie case PLUS_EXPR: case MINUS_EXPR:
4193c87b03e5Sespie /* See if we can eliminate the operation on both sides. If we can, we
4194c87b03e5Sespie can return a new PLUS or MINUS. If we can't, the only remaining
4195c87b03e5Sespie cases where we can do anything are if the second operand is a
4196c87b03e5Sespie constant. */
4197c87b03e5Sespie t1 = extract_muldiv (op0, c, code, wide_type);
4198c87b03e5Sespie t2 = extract_muldiv (op1, c, code, wide_type);
4199c87b03e5Sespie if (t1 != 0 && t2 != 0
4200c87b03e5Sespie && (code == MULT_EXPR
4201c87b03e5Sespie /* If not multiplication, we can only do this if both operands
4202c87b03e5Sespie are divisible by c. */
4203c87b03e5Sespie || (multiple_of_p (ctype, op0, c)
4204c87b03e5Sespie && multiple_of_p (ctype, op1, c))))
4205c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, t1),
4206c87b03e5Sespie convert (ctype, t2)));
4207c87b03e5Sespie
4208c87b03e5Sespie /* If this was a subtraction, negate OP1 and set it to be an addition.
4209c87b03e5Sespie This simplifies the logic below. */
4210c87b03e5Sespie if (tcode == MINUS_EXPR)
4211c87b03e5Sespie tcode = PLUS_EXPR, op1 = negate_expr (op1);
4212c87b03e5Sespie
4213c87b03e5Sespie if (TREE_CODE (op1) != INTEGER_CST)
4214c87b03e5Sespie break;
4215c87b03e5Sespie
4216c87b03e5Sespie /* If either OP1 or C is negative, this optimization is not safe for
4217c87b03e5Sespie some of the division and remainder types, while for others we need
4218c87b03e5Sespie to change the code. */
4219c87b03e5Sespie if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4220c87b03e5Sespie {
4221c87b03e5Sespie if (code == CEIL_DIV_EXPR)
4222c87b03e5Sespie code = FLOOR_DIV_EXPR;
4223c87b03e5Sespie else if (code == FLOOR_DIV_EXPR)
4224c87b03e5Sespie code = CEIL_DIV_EXPR;
4225c87b03e5Sespie else if (code != MULT_EXPR
4226c87b03e5Sespie && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4227c87b03e5Sespie break;
4228c87b03e5Sespie }
4229c87b03e5Sespie
4230c87b03e5Sespie /* If it's a multiply or a division/modulus operation of a multiple
4231c87b03e5Sespie of our constant, do the operation and verify it doesn't overflow. */
4232c87b03e5Sespie if (code == MULT_EXPR
4233c87b03e5Sespie || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4234c87b03e5Sespie {
4235c87b03e5Sespie op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4236c87b03e5Sespie if (op1 == 0 || TREE_OVERFLOW (op1))
4237c87b03e5Sespie break;
4238c87b03e5Sespie }
4239c87b03e5Sespie else
4240c87b03e5Sespie break;
4241c87b03e5Sespie
4242c87b03e5Sespie /* If we have an unsigned type that is not a sizetype, we cannot widen
4243c87b03e5Sespie the operation since it will change the result if the original
4244c87b03e5Sespie computation overflowed. */
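      /* For example, in 8-bit unsigned arithmetic (x + 200) / 4 differs
         from the same computation done in a wider type once x + 200
         wraps around: for x == 100 the narrow form gives 44 / 4 == 11,
         the wide form 300 / 4 == 75.  */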
4245c87b03e5Sespie if (TREE_UNSIGNED (ctype)
4246c87b03e5Sespie && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4247c87b03e5Sespie && ctype != type)
4248c87b03e5Sespie break;
4249c87b03e5Sespie
4250c87b03e5Sespie /* If we were able to eliminate our operation from the first side,
4251c87b03e5Sespie apply our operation to the second side and reform the PLUS. */
4252c87b03e5Sespie if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4253c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, t1), op1));
4254c87b03e5Sespie
4255c87b03e5Sespie /* The last case is if we are a multiply. In that case, we can
4256c87b03e5Sespie apply the distributive law to commute the multiply and addition
4257c87b03e5Sespie if the multiplication of the constants doesn't overflow. */
4258c87b03e5Sespie if (code == MULT_EXPR)
4259c87b03e5Sespie return fold (build (tcode, ctype, fold (build (code, ctype,
4260c87b03e5Sespie convert (ctype, op0),
4261c87b03e5Sespie convert (ctype, c))),
4262c87b03e5Sespie op1));
4263c87b03e5Sespie
4264c87b03e5Sespie break;
4265c87b03e5Sespie
4266c87b03e5Sespie case MULT_EXPR:
4267c87b03e5Sespie /* We have a special case here if we are doing something like
4268c87b03e5Sespie (C * 8) % 4 since we know that's zero. */
4269c87b03e5Sespie if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4270c87b03e5Sespie || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4271c87b03e5Sespie && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4272c87b03e5Sespie && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4273c87b03e5Sespie return omit_one_operand (type, integer_zero_node, op0);
4274c87b03e5Sespie
4275c87b03e5Sespie /* ... fall through ... */
4276c87b03e5Sespie
4277c87b03e5Sespie case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4278c87b03e5Sespie case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4279c87b03e5Sespie /* If we can extract our operation from the LHS, do so and return a
4280c87b03e5Sespie new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4281c87b03e5Sespie do something only if the second operand is a constant. */
4282c87b03e5Sespie if (same_p
4283c87b03e5Sespie && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4284c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, t1),
4285c87b03e5Sespie convert (ctype, op1)));
4286c87b03e5Sespie else if (tcode == MULT_EXPR && code == MULT_EXPR
4287c87b03e5Sespie && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4288c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, op0),
4289c87b03e5Sespie convert (ctype, t1)));
4290c87b03e5Sespie else if (TREE_CODE (op1) != INTEGER_CST)
4291c87b03e5Sespie return 0;
4292c87b03e5Sespie
4293c87b03e5Sespie /* If these are the same operation types, we can associate them
4294c87b03e5Sespie assuming no overflow. */
4295c87b03e5Sespie if (tcode == code
4296c87b03e5Sespie && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4297c87b03e5Sespie convert (ctype, c), 0))
4298c87b03e5Sespie && ! TREE_OVERFLOW (t1))
4299c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, op0), t1));
4300c87b03e5Sespie
4301c87b03e5Sespie /* If these operations "cancel" each other, we have the main
4302c87b03e5Sespie optimizations of this pass, which occur when either constant is a
4303c87b03e5Sespie multiple of the other, in which case we replace this with either an
4304c87b03e5Sespie operation of CODE or of TCODE.
4305c87b03e5Sespie
4306c87b03e5Sespie If we have an unsigned type that is not a sizetype, we cannot do
4307c87b03e5Sespie this since it will change the result if the original computation
4308c87b03e5Sespie overflowed. */
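      /* For example, (X * 8) / 4 becomes X * 2, and (X * 2) / 6 becomes
         X / 3, when the signedness conditions above are met.  */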
4309c87b03e5Sespie if ((! TREE_UNSIGNED (ctype)
4310c87b03e5Sespie || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4311c87b03e5Sespie && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4312c87b03e5Sespie || (tcode == MULT_EXPR
4313c87b03e5Sespie && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4314c87b03e5Sespie && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4315c87b03e5Sespie {
4316c87b03e5Sespie if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4317c87b03e5Sespie return fold (build (tcode, ctype, convert (ctype, op0),
4318c87b03e5Sespie convert (ctype,
4319c87b03e5Sespie const_binop (TRUNC_DIV_EXPR,
4320c87b03e5Sespie op1, c, 0))));
4321c87b03e5Sespie else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4322c87b03e5Sespie return fold (build (code, ctype, convert (ctype, op0),
4323c87b03e5Sespie convert (ctype,
4324c87b03e5Sespie const_binop (TRUNC_DIV_EXPR,
4325c87b03e5Sespie c, op1, 0))));
4326c87b03e5Sespie }
4327c87b03e5Sespie break;
4328c87b03e5Sespie
4329c87b03e5Sespie default:
4330c87b03e5Sespie break;
4331c87b03e5Sespie }
4332c87b03e5Sespie
4333c87b03e5Sespie return 0;
4334c87b03e5Sespie }
4335c87b03e5Sespie
4336c87b03e5Sespie /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4337c87b03e5Sespie S, a SAVE_EXPR, return the expression actually being evaluated. Note
4338c87b03e5Sespie that we may sometimes modify the tree. */
4339c87b03e5Sespie
4340c87b03e5Sespie static tree
4341c87b03e5Sespie strip_compound_expr (t, s)
4342c87b03e5Sespie tree t;
4343c87b03e5Sespie tree s;
4344c87b03e5Sespie {
4345c87b03e5Sespie enum tree_code code = TREE_CODE (t);
4346c87b03e5Sespie
4347c87b03e5Sespie /* See if this is the COMPOUND_EXPR we want to eliminate. */
4348c87b03e5Sespie if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4349c87b03e5Sespie && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4350c87b03e5Sespie return TREE_OPERAND (t, 1);
4351c87b03e5Sespie
4352c87b03e5Sespie /* See if this is a COND_EXPR or a simple arithmetic operator. We
4353c87b03e5Sespie don't bother handling any other types. */
4354c87b03e5Sespie else if (code == COND_EXPR)
4355c87b03e5Sespie {
4356c87b03e5Sespie TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4357c87b03e5Sespie TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4358c87b03e5Sespie TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4359c87b03e5Sespie }
4360c87b03e5Sespie else if (TREE_CODE_CLASS (code) == '1')
4361c87b03e5Sespie TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4362c87b03e5Sespie else if (TREE_CODE_CLASS (code) == '<'
4363c87b03e5Sespie || TREE_CODE_CLASS (code) == '2')
4364c87b03e5Sespie {
4365c87b03e5Sespie TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4366c87b03e5Sespie TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4367c87b03e5Sespie }
4368c87b03e5Sespie
4369c87b03e5Sespie return t;
4370c87b03e5Sespie }
4371c87b03e5Sespie
4372c87b03e5Sespie /* Return a node which has the indicated constant VALUE (either 0 or
4373c87b03e5Sespie 1), and is of the indicated TYPE. */
4374c87b03e5Sespie
4375c87b03e5Sespie static tree
4376c87b03e5Sespie constant_boolean_node (value, type)
4377c87b03e5Sespie int value;
4378c87b03e5Sespie tree type;
4379c87b03e5Sespie {
4380c87b03e5Sespie if (type == integer_type_node)
4381c87b03e5Sespie return value ? integer_one_node : integer_zero_node;
4382c87b03e5Sespie else if (TREE_CODE (type) == BOOLEAN_TYPE)
4383c87b03e5Sespie return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4384c87b03e5Sespie integer_zero_node);
4385c87b03e5Sespie else
4386c87b03e5Sespie {
4387c87b03e5Sespie tree t = build_int_2 (value, 0);
4388c87b03e5Sespie
4389c87b03e5Sespie TREE_TYPE (t) = type;
4390c87b03e5Sespie return t;
4391c87b03e5Sespie }
4392c87b03e5Sespie }
4393c87b03e5Sespie
4394c87b03e5Sespie /* Utility function for the following routine, to see how complex a nesting of
4395c87b03e5Sespie COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4396c87b03e5Sespie we don't care (to avoid spending too much time on complex expressions). */
4397c87b03e5Sespie
4398c87b03e5Sespie static int
4399c87b03e5Sespie count_cond (expr, lim)
4400c87b03e5Sespie tree expr;
4401c87b03e5Sespie int lim;
4402c87b03e5Sespie {
4403c87b03e5Sespie int ctrue, cfalse;
4404c87b03e5Sespie
4405c87b03e5Sespie if (TREE_CODE (expr) != COND_EXPR)
4406c87b03e5Sespie return 0;
4407c87b03e5Sespie else if (lim <= 0)
4408c87b03e5Sespie return 0;
4409c87b03e5Sespie
4410c87b03e5Sespie ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4411c87b03e5Sespie cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4412c87b03e5Sespie return MIN (lim, 1 + ctrue + cfalse);
4413c87b03e5Sespie }
4414c87b03e5Sespie
4415c87b03e5Sespie /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4416c87b03e5Sespie Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4417c87b03e5Sespie CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4418c87b03e5Sespie expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4419c87b03e5Sespie COND is the first argument to CODE; otherwise (as in the example
4420c87b03e5Sespie given here), it is the second argument. TYPE is the type of the
4421c87b03e5Sespie original expression. */
4422c87b03e5Sespie
4423c87b03e5Sespie static tree
4424c87b03e5Sespie fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4425c87b03e5Sespie enum tree_code code;
4426c87b03e5Sespie tree type;
4427c87b03e5Sespie tree cond;
4428c87b03e5Sespie tree arg;
4429c87b03e5Sespie int cond_first_p;
4430c87b03e5Sespie {
4431c87b03e5Sespie tree test, true_value, false_value;
4432c87b03e5Sespie tree lhs = NULL_TREE;
4433c87b03e5Sespie tree rhs = NULL_TREE;
4434c87b03e5Sespie /* In the end, we'll produce a COND_EXPR. Both arms of the
4435c87b03e5Sespie conditional expression will be binary operations. The left-hand
4436c87b03e5Sespie side of the expression to be executed if the condition is true
4437c87b03e5Sespie will be pointed to by TRUE_LHS. Similarly, the right-hand side
4438c87b03e5Sespie of the expression to be executed if the condition is true will be
4439c87b03e5Sespie pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4440c87b03e5Sespie but apply to the expression to be executed if the conditional is
4441c87b03e5Sespie false. */
4442c87b03e5Sespie tree *true_lhs;
4443c87b03e5Sespie tree *true_rhs;
4444c87b03e5Sespie tree *false_lhs;
4445c87b03e5Sespie tree *false_rhs;
4446c87b03e5Sespie /* These are the codes to use for the left-hand side and right-hand
4447c87b03e5Sespie side of the COND_EXPR. Normally, they are the same as CODE. */
4448c87b03e5Sespie enum tree_code lhs_code = code;
4449c87b03e5Sespie enum tree_code rhs_code = code;
4450c87b03e5Sespie /* And these are the types of the expressions. */
4451c87b03e5Sespie tree lhs_type = type;
4452c87b03e5Sespie tree rhs_type = type;
4453c87b03e5Sespie int save = 0;
4454c87b03e5Sespie
4455c87b03e5Sespie if (cond_first_p)
4456c87b03e5Sespie {
4457c87b03e5Sespie true_rhs = false_rhs = &arg;
4458c87b03e5Sespie true_lhs = &true_value;
4459c87b03e5Sespie false_lhs = &false_value;
4460c87b03e5Sespie }
4461c87b03e5Sespie else
4462c87b03e5Sespie {
4463c87b03e5Sespie true_lhs = false_lhs = &arg;
4464c87b03e5Sespie true_rhs = &true_value;
4465c87b03e5Sespie false_rhs = &false_value;
4466c87b03e5Sespie }
4467c87b03e5Sespie
4468c87b03e5Sespie if (TREE_CODE (cond) == COND_EXPR)
4469c87b03e5Sespie {
4470c87b03e5Sespie test = TREE_OPERAND (cond, 0);
4471c87b03e5Sespie true_value = TREE_OPERAND (cond, 1);
4472c87b03e5Sespie false_value = TREE_OPERAND (cond, 2);
4473c87b03e5Sespie /* If this operand throws an exception, then it does not make
4474c87b03e5Sespie sense to try to perform a logical or arithmetic operation
4475c87b03e5Sespie involving it. Instead of building `a + throw 3' for example,
4476c87b03e5Sespie we simply build `a, throw 3'. */
4477c87b03e5Sespie if (VOID_TYPE_P (TREE_TYPE (true_value)))
4478c87b03e5Sespie {
4479c87b03e5Sespie if (! cond_first_p)
4480c87b03e5Sespie {
4481c87b03e5Sespie lhs_code = COMPOUND_EXPR;
4482c87b03e5Sespie lhs_type = void_type_node;
4483c87b03e5Sespie }
4484c87b03e5Sespie else
4485c87b03e5Sespie lhs = true_value;
4486c87b03e5Sespie }
4487c87b03e5Sespie if (VOID_TYPE_P (TREE_TYPE (false_value)))
4488c87b03e5Sespie {
4489c87b03e5Sespie if (! cond_first_p)
4490c87b03e5Sespie {
4491c87b03e5Sespie rhs_code = COMPOUND_EXPR;
4492c87b03e5Sespie rhs_type = void_type_node;
4493c87b03e5Sespie }
4494c87b03e5Sespie else
4495c87b03e5Sespie rhs = false_value;
4496c87b03e5Sespie }
4497c87b03e5Sespie }
4498c87b03e5Sespie else
4499c87b03e5Sespie {
4500c87b03e5Sespie tree testtype = TREE_TYPE (cond);
4501c87b03e5Sespie test = cond;
4502c87b03e5Sespie true_value = convert (testtype, integer_one_node);
4503c87b03e5Sespie false_value = convert (testtype, integer_zero_node);
4504c87b03e5Sespie }
4505c87b03e5Sespie
4506c87b03e5Sespie /* If ARG is complex we want to make sure we only evaluate
4507c87b03e5Sespie it once. Though this is only required if it is volatile, it
4508c87b03e5Sespie might be more efficient even if it is not. However, if we
4509c87b03e5Sespie succeed in folding one part to a constant, we do not need
4510c87b03e5Sespie to make this SAVE_EXPR. Since we do this optimization
4511c87b03e5Sespie primarily to see if we do end up with a constant and this
4512c87b03e5Sespie SAVE_EXPR interferes with later optimizations, suppressing
4513c87b03e5Sespie it when we can is important.
4514c87b03e5Sespie
4515c87b03e5Sespie If we are not in a function, we can't make a SAVE_EXPR, so don't
4516c87b03e5Sespie try to do so. Don't try to see if the result is a constant
4517c87b03e5Sespie if an arm is a COND_EXPR since we get exponential behavior
4518c87b03e5Sespie in that case. */
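  /* For example, folding a + (b ? 2 : 3) where a is a simple variable
     yields b ? a + 2 : a + 3 directly; if a were an expression with side
     effects, it would first be wrapped in a SAVE_EXPR so that it is
     evaluated only once.  */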
4519c87b03e5Sespie
4520c87b03e5Sespie if (TREE_CODE (arg) == SAVE_EXPR)
4521c87b03e5Sespie save = 1;
4522c87b03e5Sespie else if (lhs == 0 && rhs == 0
4523c87b03e5Sespie && !TREE_CONSTANT (arg)
4524c87b03e5Sespie && (*lang_hooks.decls.global_bindings_p) () == 0
4525c87b03e5Sespie && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4526c87b03e5Sespie || TREE_SIDE_EFFECTS (arg)))
4527c87b03e5Sespie {
4528c87b03e5Sespie if (TREE_CODE (true_value) != COND_EXPR)
4529c87b03e5Sespie lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4530c87b03e5Sespie
4531c87b03e5Sespie if (TREE_CODE (false_value) != COND_EXPR)
4532c87b03e5Sespie rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4533c87b03e5Sespie
4534c87b03e5Sespie if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4535c87b03e5Sespie && (rhs == 0 || !TREE_CONSTANT (rhs)))
4536c87b03e5Sespie {
4537c87b03e5Sespie arg = save_expr (arg);
4538c87b03e5Sespie lhs = rhs = 0;
4539c87b03e5Sespie save = 1;
4540c87b03e5Sespie }
4541c87b03e5Sespie }
4542c87b03e5Sespie
4543c87b03e5Sespie if (lhs == 0)
4544c87b03e5Sespie lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4545c87b03e5Sespie if (rhs == 0)
4546c87b03e5Sespie rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4547c87b03e5Sespie
4548c87b03e5Sespie test = fold (build (COND_EXPR, type, test, lhs, rhs));
4549c87b03e5Sespie
4550c87b03e5Sespie if (save)
4551c87b03e5Sespie return build (COMPOUND_EXPR, type,
4552c87b03e5Sespie convert (void_type_node, arg),
4553c87b03e5Sespie strip_compound_expr (test, arg));
4554c87b03e5Sespie else
4555c87b03e5Sespie return convert (type, test);
4556c87b03e5Sespie }
4557c87b03e5Sespie
4558c87b03e5Sespie
4559c87b03e5Sespie /* Subroutine of fold() that checks for the addition of +/- 0.0.
4560c87b03e5Sespie
4561c87b03e5Sespie If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4562c87b03e5Sespie TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4563c87b03e5Sespie ADDEND is the same as X.
4564c87b03e5Sespie
4565c87b03e5Sespie X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4566c87b03e5Sespie and finite. The problematic cases are when X is zero, and its mode
4567c87b03e5Sespie has signed zeros. In the case of rounding towards -infinity,
4568c87b03e5Sespie X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4569c87b03e5Sespie modes, X + 0 is not the same as X because -0 + 0 is 0. */
4570c87b03e5Sespie
4571c87b03e5Sespie static bool
4572c87b03e5Sespie fold_real_zero_addition_p (type, addend, negate)
4573c87b03e5Sespie tree type, addend;
4574c87b03e5Sespie int negate;
4575c87b03e5Sespie {
4576c87b03e5Sespie if (!real_zerop (addend))
4577c87b03e5Sespie return false;
4578c87b03e5Sespie
4579c87b03e5Sespie /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4580c87b03e5Sespie if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4581c87b03e5Sespie return true;
4582c87b03e5Sespie
4583c87b03e5Sespie /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4584c87b03e5Sespie if (TREE_CODE (addend) == REAL_CST
4585c87b03e5Sespie && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4586c87b03e5Sespie negate = !negate;
4587c87b03e5Sespie
4588c87b03e5Sespie /* The mode has signed zeros, and we have to honor their sign.
4589c87b03e5Sespie In this situation, there is only one case we can return true for.
4590c87b03e5Sespie X - 0 is the same as X unless rounding towards -infinity is
4591c87b03e5Sespie supported. */
4592c87b03e5Sespie return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4593c87b03e5Sespie }
4594c87b03e5Sespie
4595c87b03e5Sespie
4596c87b03e5Sespie /* Perform constant folding and related simplification of EXPR.
4597c87b03e5Sespie The related simplifications include x*1 => x, x*0 => 0, etc.,
4598c87b03e5Sespie and application of the associative law.
4599c87b03e5Sespie NOP_EXPR conversions may be removed freely (as long as we
4600c87b03e5Sespie are careful not to change the C type of the overall expression).
4601c87b03e5Sespie We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4602c87b03e5Sespie but we can constant-fold them if they have constant operands. */
4603c87b03e5Sespie
4604c87b03e5Sespie tree
4605c87b03e5Sespie fold (expr)
4606c87b03e5Sespie tree expr;
4607c87b03e5Sespie {
4608c87b03e5Sespie tree t = expr;
4609c87b03e5Sespie tree t1 = NULL_TREE;
4610c87b03e5Sespie tree tem;
4611c87b03e5Sespie tree type = TREE_TYPE (expr);
4612c87b03e5Sespie tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4613c87b03e5Sespie enum tree_code code = TREE_CODE (t);
4614c87b03e5Sespie int kind = TREE_CODE_CLASS (code);
4615c87b03e5Sespie int invert;
4616c87b03e5Sespie /* WINS will be nonzero when the switch is done
4617c87b03e5Sespie if all operands are constant. */
4618c87b03e5Sespie int wins = 1;
4619c87b03e5Sespie
4620c87b03e5Sespie /* Don't try to process an RTL_EXPR since its operands aren't trees.
4621c87b03e5Sespie Likewise for a SAVE_EXPR that's already been evaluated. */
4622c87b03e5Sespie if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4623c87b03e5Sespie return t;
4624c87b03e5Sespie
4625c87b03e5Sespie /* Return right away if a constant. */
4626c87b03e5Sespie if (kind == 'c')
4627c87b03e5Sespie return t;
4628c87b03e5Sespie
4629c87b03e5Sespie #ifdef MAX_INTEGER_COMPUTATION_MODE
4630c87b03e5Sespie check_max_integer_computation_mode (expr);
4631c87b03e5Sespie #endif
4632c87b03e5Sespie
4633c87b03e5Sespie if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4634c87b03e5Sespie {
4635c87b03e5Sespie tree subop;
4636c87b03e5Sespie
4637c87b03e5Sespie /* Special case for conversion ops that can have fixed point args. */
4638c87b03e5Sespie arg0 = TREE_OPERAND (t, 0);
4639c87b03e5Sespie
4640c87b03e5Sespie /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4641c87b03e5Sespie if (arg0 != 0)
4642c87b03e5Sespie STRIP_SIGN_NOPS (arg0);
4643c87b03e5Sespie
4644c87b03e5Sespie if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4645c87b03e5Sespie subop = TREE_REALPART (arg0);
4646c87b03e5Sespie else
4647c87b03e5Sespie subop = arg0;
4648c87b03e5Sespie
4649c87b03e5Sespie if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4650c87b03e5Sespie && TREE_CODE (subop) != REAL_CST)
4652c87b03e5Sespie /* Note that TREE_CONSTANT isn't enough:
4653c87b03e5Sespie static var addresses are constant but we can't
4654c87b03e5Sespie do arithmetic on them. */
4655c87b03e5Sespie wins = 0;
4656c87b03e5Sespie }
4657c87b03e5Sespie else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4658c87b03e5Sespie {
4659c87b03e5Sespie int len = first_rtl_op (code);
4660c87b03e5Sespie int i;
4661c87b03e5Sespie for (i = 0; i < len; i++)
4662c87b03e5Sespie {
4663c87b03e5Sespie tree op = TREE_OPERAND (t, i);
4664c87b03e5Sespie tree subop;
4665c87b03e5Sespie
4666c87b03e5Sespie if (op == 0)
4667c87b03e5Sespie continue; /* Valid for CALL_EXPR, at least. */
4668c87b03e5Sespie
4669c87b03e5Sespie if (kind == '<' || code == RSHIFT_EXPR)
4670c87b03e5Sespie {
4671c87b03e5Sespie /* Signedness matters here. Perhaps we can refine this
4672c87b03e5Sespie later. */
4673c87b03e5Sespie STRIP_SIGN_NOPS (op);
4674c87b03e5Sespie }
4675c87b03e5Sespie else
4676c87b03e5Sespie /* Strip any conversions that don't change the mode. */
4677c87b03e5Sespie STRIP_NOPS (op);
4678c87b03e5Sespie
4679c87b03e5Sespie if (TREE_CODE (op) == COMPLEX_CST)
4680c87b03e5Sespie subop = TREE_REALPART (op);
4681c87b03e5Sespie else
4682c87b03e5Sespie subop = op;
4683c87b03e5Sespie
4684c87b03e5Sespie if (TREE_CODE (subop) != INTEGER_CST
4685c87b03e5Sespie && TREE_CODE (subop) != REAL_CST)
4686c87b03e5Sespie /* Note that TREE_CONSTANT isn't enough:
4687c87b03e5Sespie static var addresses are constant but we can't
4688c87b03e5Sespie do arithmetic on them. */
4689c87b03e5Sespie wins = 0;
4690c87b03e5Sespie
4691c87b03e5Sespie if (i == 0)
4692c87b03e5Sespie arg0 = op;
4693c87b03e5Sespie else if (i == 1)
4694c87b03e5Sespie arg1 = op;
4695c87b03e5Sespie }
4696c87b03e5Sespie }
4697c87b03e5Sespie
4698c87b03e5Sespie /* If this is a commutative operation, and ARG0 is a constant, move it
4699c87b03e5Sespie to ARG1 to reduce the number of tests below. */
4700c87b03e5Sespie if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4701c87b03e5Sespie || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4702c87b03e5Sespie || code == BIT_AND_EXPR)
4703c87b03e5Sespie && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4704c87b03e5Sespie {
4705c87b03e5Sespie tem = arg0; arg0 = arg1; arg1 = tem;
4706c87b03e5Sespie
4707c87b03e5Sespie tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4708c87b03e5Sespie TREE_OPERAND (t, 1) = tem;
4709c87b03e5Sespie }
4710c87b03e5Sespie
4711c87b03e5Sespie /* Now WINS is set as described above,
4712c87b03e5Sespie ARG0 is the first operand of EXPR,
4713c87b03e5Sespie and ARG1 is the second operand (if it has more than one operand).
4714c87b03e5Sespie
4715c87b03e5Sespie First check for cases where an arithmetic operation is applied to a
4716c87b03e5Sespie compound, conditional, or comparison operation. Push the arithmetic
4717c87b03e5Sespie operation inside the compound or conditional to see if any folding
4718c87b03e5Sespie can then be done. Convert comparison to conditional for this purpose.
4719c87b03e5Sespie This also optimizes non-constant cases that used to be done in
4720c87b03e5Sespie expand_expr.
4721c87b03e5Sespie
4722c87b03e5Sespie Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
4723c87b03e5Sespie where one of the operands is a truth value and the other is a truth
4724c87b03e5Sespie value or a BIT_AND_EXPR with the constant 1. In that case, the
4725c87b03e5Sespie code below would make the expression more complex. Change it to a
4726c87b03e5Sespie TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4727c87b03e5Sespie TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
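  /* For example, (a < b) & (c < d) is rewritten as a TRUTH_AND_EXPR of
     the two comparisons, and (a < b) != (c < d) as a TRUTH_XOR_EXPR.  */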
4728c87b03e5Sespie
4729c87b03e5Sespie if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4730c87b03e5Sespie || code == EQ_EXPR || code == NE_EXPR)
4731c87b03e5Sespie && ((truth_value_p (TREE_CODE (arg0))
4732c87b03e5Sespie && (truth_value_p (TREE_CODE (arg1))
4733c87b03e5Sespie || (TREE_CODE (arg1) == BIT_AND_EXPR
4734c87b03e5Sespie && integer_onep (TREE_OPERAND (arg1, 1)))))
4735c87b03e5Sespie || (truth_value_p (TREE_CODE (arg1))
4736c87b03e5Sespie && (truth_value_p (TREE_CODE (arg0))
4737c87b03e5Sespie || (TREE_CODE (arg0) == BIT_AND_EXPR
4738c87b03e5Sespie && integer_onep (TREE_OPERAND (arg0, 1)))))))
4739c87b03e5Sespie {
4740c87b03e5Sespie t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4741c87b03e5Sespie : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4742c87b03e5Sespie : TRUTH_XOR_EXPR,
4743c87b03e5Sespie type, arg0, arg1));
4744c87b03e5Sespie
4745c87b03e5Sespie if (code == EQ_EXPR)
4746c87b03e5Sespie t = invert_truthvalue (t);
4747c87b03e5Sespie
4748c87b03e5Sespie return t;
4749c87b03e5Sespie }
4750c87b03e5Sespie
4751c87b03e5Sespie if (TREE_CODE_CLASS (code) == '1')
4752c87b03e5Sespie {
4753c87b03e5Sespie if (TREE_CODE (arg0) == COMPOUND_EXPR)
4754c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4755c87b03e5Sespie fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4756c87b03e5Sespie else if (TREE_CODE (arg0) == COND_EXPR)
4757c87b03e5Sespie {
4758c87b03e5Sespie tree arg01 = TREE_OPERAND (arg0, 1);
4759c87b03e5Sespie tree arg02 = TREE_OPERAND (arg0, 2);
4760c87b03e5Sespie if (! VOID_TYPE_P (TREE_TYPE (arg01)))
4761c87b03e5Sespie arg01 = fold (build1 (code, type, arg01));
4762c87b03e5Sespie if (! VOID_TYPE_P (TREE_TYPE (arg02)))
4763c87b03e5Sespie arg02 = fold (build1 (code, type, arg02));
4764c87b03e5Sespie t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4765c87b03e5Sespie arg01, arg02));
4766c87b03e5Sespie
4767c87b03e5Sespie /* If this was a conversion, and all we did was to move into
4768c87b03e5Sespie inside the COND_EXPR, bring it back out. But leave it if
4769c87b03e5Sespie it is a conversion from integer to integer and the
4770c87b03e5Sespie result precision is no wider than a word since such a
4771c87b03e5Sespie conversion is cheap and may be optimized away by combine,
4772c87b03e5Sespie while it couldn't if it were outside the COND_EXPR. Then return
4773c87b03e5Sespie so we don't get into an infinite recursion loop taking the
4774c87b03e5Sespie conversion out and then back in. */
4775c87b03e5Sespie
4776c87b03e5Sespie if ((code == NOP_EXPR || code == CONVERT_EXPR
4777c87b03e5Sespie || code == NON_LVALUE_EXPR)
4778c87b03e5Sespie && TREE_CODE (t) == COND_EXPR
4779c87b03e5Sespie && TREE_CODE (TREE_OPERAND (t, 1)) == code
4780c87b03e5Sespie && TREE_CODE (TREE_OPERAND (t, 2)) == code
4781c87b03e5Sespie && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
4782c87b03e5Sespie && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
4783c87b03e5Sespie && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4784c87b03e5Sespie == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4785c87b03e5Sespie && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4786c87b03e5Sespie && (INTEGRAL_TYPE_P
4787c87b03e5Sespie (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4788c87b03e5Sespie && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4789c87b03e5Sespie t = build1 (code, type,
4790c87b03e5Sespie build (COND_EXPR,
4791c87b03e5Sespie TREE_TYPE (TREE_OPERAND
4792c87b03e5Sespie (TREE_OPERAND (t, 1), 0)),
4793c87b03e5Sespie TREE_OPERAND (t, 0),
4794c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4795c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4796c87b03e5Sespie return t;
4797c87b03e5Sespie }
4798c87b03e5Sespie else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4799c87b03e5Sespie return fold (build (COND_EXPR, type, arg0,
4800c87b03e5Sespie fold (build1 (code, type, integer_one_node)),
4801c87b03e5Sespie fold (build1 (code, type, integer_zero_node))));
4802c87b03e5Sespie }
4803c87b03e5Sespie else if (TREE_CODE_CLASS (code) == '2'
4804c87b03e5Sespie || TREE_CODE_CLASS (code) == '<')
4805c87b03e5Sespie {
4806c87b03e5Sespie if (TREE_CODE (arg1) == COMPOUND_EXPR
4807c87b03e5Sespie && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
4808c87b03e5Sespie && ! TREE_SIDE_EFFECTS (arg0))
4809c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4810c87b03e5Sespie fold (build (code, type,
4811c87b03e5Sespie arg0, TREE_OPERAND (arg1, 1))));
4812c87b03e5Sespie else if ((TREE_CODE (arg1) == COND_EXPR
4813c87b03e5Sespie || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4814c87b03e5Sespie && TREE_CODE_CLASS (code) != '<'))
4815c87b03e5Sespie && (TREE_CODE (arg0) != COND_EXPR
4816c87b03e5Sespie || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4817c87b03e5Sespie && (! TREE_SIDE_EFFECTS (arg0)
4818c87b03e5Sespie || ((*lang_hooks.decls.global_bindings_p) () == 0
4819c87b03e5Sespie && ! contains_placeholder_p (arg0))))
4820c87b03e5Sespie return
4821c87b03e5Sespie fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4822c87b03e5Sespie /*cond_first_p=*/0);
4823c87b03e5Sespie else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4824c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4825c87b03e5Sespie fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4826c87b03e5Sespie else if ((TREE_CODE (arg0) == COND_EXPR
4827c87b03e5Sespie || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4828c87b03e5Sespie && TREE_CODE_CLASS (code) != '<'))
4829c87b03e5Sespie && (TREE_CODE (arg1) != COND_EXPR
4830c87b03e5Sespie || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4831c87b03e5Sespie && (! TREE_SIDE_EFFECTS (arg1)
4832c87b03e5Sespie || ((*lang_hooks.decls.global_bindings_p) () == 0
4833c87b03e5Sespie && ! contains_placeholder_p (arg1))))
4834c87b03e5Sespie return
4835c87b03e5Sespie fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4836c87b03e5Sespie /*cond_first_p=*/1);
4837c87b03e5Sespie }
4838c87b03e5Sespie else if (TREE_CODE_CLASS (code) == '<'
4839c87b03e5Sespie && TREE_CODE (arg0) == COMPOUND_EXPR)
4840c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4841c87b03e5Sespie fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4842c87b03e5Sespie else if (TREE_CODE_CLASS (code) == '<'
4843c87b03e5Sespie && TREE_CODE (arg1) == COMPOUND_EXPR)
4844c87b03e5Sespie return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4845c87b03e5Sespie fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4846c87b03e5Sespie
4847c87b03e5Sespie switch (code)
4848c87b03e5Sespie {
4849c87b03e5Sespie case INTEGER_CST:
4850c87b03e5Sespie case REAL_CST:
4851c87b03e5Sespie case VECTOR_CST:
4852c87b03e5Sespie case STRING_CST:
4853c87b03e5Sespie case COMPLEX_CST:
4854c87b03e5Sespie case CONSTRUCTOR:
4855c87b03e5Sespie return t;
4856c87b03e5Sespie
4857c87b03e5Sespie case CONST_DECL:
4858c87b03e5Sespie return fold (DECL_INITIAL (t));
4859c87b03e5Sespie
4860c87b03e5Sespie case NOP_EXPR:
4861c87b03e5Sespie case FLOAT_EXPR:
4862c87b03e5Sespie case CONVERT_EXPR:
4863c87b03e5Sespie case FIX_TRUNC_EXPR:
4864c87b03e5Sespie /* Other kinds of FIX are not handled properly by fold_convert. */
4865c87b03e5Sespie
4866c87b03e5Sespie if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4867c87b03e5Sespie return TREE_OPERAND (t, 0);
4868c87b03e5Sespie
4869c87b03e5Sespie /* Handle cases of two conversions in a row. */
4870c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4871c87b03e5Sespie || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4872c87b03e5Sespie {
4873c87b03e5Sespie tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4874c87b03e5Sespie tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4875c87b03e5Sespie tree final_type = TREE_TYPE (t);
4876c87b03e5Sespie int inside_int = INTEGRAL_TYPE_P (inside_type);
4877c87b03e5Sespie int inside_ptr = POINTER_TYPE_P (inside_type);
4878c87b03e5Sespie int inside_float = FLOAT_TYPE_P (inside_type);
4879c87b03e5Sespie unsigned int inside_prec = TYPE_PRECISION (inside_type);
4880c87b03e5Sespie int inside_unsignedp = TREE_UNSIGNED (inside_type);
4881c87b03e5Sespie int inter_int = INTEGRAL_TYPE_P (inter_type);
4882c87b03e5Sespie int inter_ptr = POINTER_TYPE_P (inter_type);
4883c87b03e5Sespie int inter_float = FLOAT_TYPE_P (inter_type);
4884c87b03e5Sespie unsigned int inter_prec = TYPE_PRECISION (inter_type);
4885c87b03e5Sespie int inter_unsignedp = TREE_UNSIGNED (inter_type);
4886c87b03e5Sespie int final_int = INTEGRAL_TYPE_P (final_type);
4887c87b03e5Sespie int final_ptr = POINTER_TYPE_P (final_type);
4888c87b03e5Sespie int final_float = FLOAT_TYPE_P (final_type);
4889c87b03e5Sespie unsigned int final_prec = TYPE_PRECISION (final_type);
4890c87b03e5Sespie int final_unsignedp = TREE_UNSIGNED (final_type);
4891c87b03e5Sespie
4892c87b03e5Sespie /* In addition to the cases of two conversions in a row
4893c87b03e5Sespie handled below, if we are converting something to its own
4894c87b03e5Sespie type via an object of identical or wider precision, neither
4895c87b03e5Sespie conversion is needed. */
4896c87b03e5Sespie if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4897c87b03e5Sespie && ((inter_int && final_int) || (inter_float && final_float))
4898c87b03e5Sespie && inter_prec >= final_prec)
4899c87b03e5Sespie return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4900c87b03e5Sespie
4901c87b03e5Sespie /* Likewise, if the intermediate and final types are either both
4902c87b03e5Sespie float or both integer, we don't need the middle conversion if
4903c87b03e5Sespie it is wider than the final type and doesn't change the signedness
4904c87b03e5Sespie (for integers). Avoid this if the final type is a pointer
4905c87b03e5Sespie since then we sometimes need the inner conversion. Likewise if
4906c87b03e5Sespie the outer has a precision not equal to the size of its mode. */
4907c87b03e5Sespie if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4908c87b03e5Sespie || (inter_float && inside_float))
4909c87b03e5Sespie && inter_prec >= inside_prec
4910c87b03e5Sespie && (inter_float || inter_unsignedp == inside_unsignedp)
4911c87b03e5Sespie && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4912c87b03e5Sespie && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4913c87b03e5Sespie && ! final_ptr)
4914c87b03e5Sespie return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4915c87b03e5Sespie
4916c87b03e5Sespie /* If we have a sign-extension of a zero-extended value, we can
4917c87b03e5Sespie replace that by a single zero-extension. */
4918c87b03e5Sespie if (inside_int && inter_int && final_int
4919c87b03e5Sespie && inside_prec < inter_prec && inter_prec < final_prec
4920c87b03e5Sespie && inside_unsignedp && !inter_unsignedp)
4921c87b03e5Sespie return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4922c87b03e5Sespie
4923c87b03e5Sespie /* Two conversions in a row are not needed unless:
4924c87b03e5Sespie - some conversion is floating-point (overstrict for now), or
4925c87b03e5Sespie - the intermediate type is narrower than both initial and
4926c87b03e5Sespie final, or
4927c87b03e5Sespie - the intermediate type and innermost type differ in signedness,
4928c87b03e5Sespie and the outermost type is wider than the intermediate, or
4929c87b03e5Sespie - the initial type is a pointer type and the precisions of the
4930c87b03e5Sespie intermediate and final types differ, or
4931c87b03e5Sespie - the final type is a pointer type and the precisions of the
4932c87b03e5Sespie initial and intermediate types differ. */
4933c87b03e5Sespie if (! inside_float && ! inter_float && ! final_float
4934c87b03e5Sespie && (inter_prec > inside_prec || inter_prec > final_prec)
4935c87b03e5Sespie && ! (inside_int && inter_int
4936c87b03e5Sespie && inter_unsignedp != inside_unsignedp
4937c87b03e5Sespie && inter_prec < final_prec)
4938c87b03e5Sespie && ((inter_unsignedp && inter_prec > inside_prec)
4939c87b03e5Sespie == (final_unsignedp && final_prec > inter_prec))
4940c87b03e5Sespie && ! (inside_ptr && inter_prec != final_prec)
4941c87b03e5Sespie && ! (final_ptr && inside_prec != inter_prec)
4942c87b03e5Sespie && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4943c87b03e5Sespie && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4944c87b03e5Sespie && ! final_ptr)
4945c87b03e5Sespie return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4946c87b03e5Sespie }
4947c87b03e5Sespie
4948c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4949c87b03e5Sespie && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4950c87b03e5Sespie /* Detect assigning a bitfield. */
4951c87b03e5Sespie && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4952c87b03e5Sespie && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4953c87b03e5Sespie {
4954c87b03e5Sespie /* Don't leave an assignment inside a conversion
4955c87b03e5Sespie unless assigning a bitfield. */
4956c87b03e5Sespie tree prev = TREE_OPERAND (t, 0);
4957c87b03e5Sespie TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4958c87b03e5Sespie /* First do the assignment, then return converted constant. */
4959c87b03e5Sespie t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4960c87b03e5Sespie TREE_USED (t) = 1;
4961c87b03e5Sespie return t;
4962c87b03e5Sespie }
4963c87b03e5Sespie
4964c87b03e5Sespie /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4965c87b03e5Sespie constant (if x has a signed type, the sign bit must not be set
4966c87b03e5Sespie in c). This folds the extension into the BIT_AND_EXPR. */
4967c87b03e5Sespie if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4968c87b03e5Sespie && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4969c87b03e5Sespie && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4970c87b03e5Sespie && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4971c87b03e5Sespie {
4972c87b03e5Sespie tree and = TREE_OPERAND (t, 0);
4973c87b03e5Sespie tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4974c87b03e5Sespie int change = 0;
4975c87b03e5Sespie
4976c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (and))
4977c87b03e5Sespie || (TYPE_PRECISION (TREE_TYPE (t))
4978c87b03e5Sespie <= TYPE_PRECISION (TREE_TYPE (and))))
4979c87b03e5Sespie change = 1;
4980c87b03e5Sespie else if (TYPE_PRECISION (TREE_TYPE (and1))
4981c87b03e5Sespie <= HOST_BITS_PER_WIDE_INT
4982c87b03e5Sespie && host_integerp (and1, 1))
4983c87b03e5Sespie {
4984c87b03e5Sespie unsigned HOST_WIDE_INT cst;
4985c87b03e5Sespie
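/* We are widening from a signed type here.  The fold is safe only
   when the constant has no bits at or above the sign-bit position of
   AND1's type, so that X & C is non-negative and sign-extending the
   masked value gives the same result as (T) X & (T) C.  */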
4986c87b03e5Sespie cst = tree_low_cst (and1, 1);
4987c87b03e5Sespie cst &= (HOST_WIDE_INT) -1
4988c87b03e5Sespie << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4989c87b03e5Sespie change = (cst == 0);
4990c87b03e5Sespie #ifdef LOAD_EXTEND_OP
4991c87b03e5Sespie if (change
4992c87b03e5Sespie && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4993c87b03e5Sespie == ZERO_EXTEND))
4994c87b03e5Sespie {
4995c87b03e5Sespie tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4996c87b03e5Sespie and0 = convert (uns, and0);
4997c87b03e5Sespie and1 = convert (uns, and1);
4998c87b03e5Sespie }
4999c87b03e5Sespie #endif
5000c87b03e5Sespie }
5001c87b03e5Sespie if (change)
5002c87b03e5Sespie return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5003c87b03e5Sespie convert (TREE_TYPE (t), and0),
5004c87b03e5Sespie convert (TREE_TYPE (t), and1)));
5005c87b03e5Sespie }
5006c87b03e5Sespie
5007c87b03e5Sespie if (!wins)
5008c87b03e5Sespie {
5009c87b03e5Sespie TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5010c87b03e5Sespie return t;
5011c87b03e5Sespie }
5012c87b03e5Sespie return fold_convert (t, arg0);
5013c87b03e5Sespie
5014c87b03e5Sespie case VIEW_CONVERT_EXPR:
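/* Nested VIEW_CONVERT_EXPRs collapse to a single reinterpretation
   in the outermost type.  */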
5015c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5016c87b03e5Sespie return build1 (VIEW_CONVERT_EXPR, type,
5017c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5018c87b03e5Sespie return t;
5019c87b03e5Sespie
5020c87b03e5Sespie case COMPONENT_REF:
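/* A COMPONENT_REF of a constant CONSTRUCTOR folds to the matching
   field's initializer, when one is present.  */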
5021c87b03e5Sespie if (TREE_CODE (arg0) == CONSTRUCTOR)
5022c87b03e5Sespie {
5023c87b03e5Sespie tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5024c87b03e5Sespie if (m)
5025c87b03e5Sespie t = TREE_VALUE (m);
5026c87b03e5Sespie }
5027c87b03e5Sespie return t;
5028c87b03e5Sespie
5029c87b03e5Sespie case RANGE_EXPR:
5030c87b03e5Sespie TREE_CONSTANT (t) = wins;
5031c87b03e5Sespie return t;
5032c87b03e5Sespie
5033c87b03e5Sespie case NEGATE_EXPR:
5034c87b03e5Sespie if (wins)
5035c87b03e5Sespie {
5036c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST)
5037c87b03e5Sespie {
5038c87b03e5Sespie unsigned HOST_WIDE_INT low;
5039c87b03e5Sespie HOST_WIDE_INT high;
5040c87b03e5Sespie int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5041c87b03e5Sespie TREE_INT_CST_HIGH (arg0),
5042c87b03e5Sespie &low, &high);
5043c87b03e5Sespie t = build_int_2 (low, high);
5044c87b03e5Sespie TREE_TYPE (t) = type;
5045c87b03e5Sespie TREE_OVERFLOW (t)
5046c87b03e5Sespie = (TREE_OVERFLOW (arg0)
5047c87b03e5Sespie | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5048c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
5049c87b03e5Sespie = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5050cd3be6e5Savsm /* If arg0 was calculated from sizeof(ptr), record this. */
5051cd3be6e5Savsm if (SIZEOF_PTR_DERIVED (arg0))
5052cd3be6e5Savsm SIZEOF_PTR_DERIVED (t) = 1;
5053c87b03e5Sespie }
5054c87b03e5Sespie else if (TREE_CODE (arg0) == REAL_CST)
5055c87b03e5Sespie t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5056c87b03e5Sespie }
5057c87b03e5Sespie else if (TREE_CODE (arg0) == NEGATE_EXPR)
5058c87b03e5Sespie return TREE_OPERAND (arg0, 0);
5059c87b03e5Sespie
5060c87b03e5Sespie /* Convert - (a - b) to (b - a) for non-floating-point. */
5061c87b03e5Sespie else if (TREE_CODE (arg0) == MINUS_EXPR
5062c87b03e5Sespie && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5063c87b03e5Sespie return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5064c87b03e5Sespie TREE_OPERAND (arg0, 0));
5065c87b03e5Sespie
5066c87b03e5Sespie return t;
5067c87b03e5Sespie
5068c87b03e5Sespie case ABS_EXPR:
5069c87b03e5Sespie if (wins)
5070c87b03e5Sespie {
5071c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST)
5072c87b03e5Sespie {
5073c87b03e5Sespie /* If the value is unsigned, then the absolute value is
5074c87b03e5Sespie the same as the ordinary value. */
5075c87b03e5Sespie if (TREE_UNSIGNED (type))
5076c87b03e5Sespie return arg0;
5077c87b03e5Sespie /* Similarly, if the value is non-negative. */
5078c87b03e5Sespie else if (INT_CST_LT (integer_minus_one_node, arg0))
5079c87b03e5Sespie return arg0;
5080c87b03e5Sespie /* If the value is negative, then the absolute value is
5081c87b03e5Sespie its negation. */
5082c87b03e5Sespie else
5083c87b03e5Sespie {
5084c87b03e5Sespie unsigned HOST_WIDE_INT low;
5085c87b03e5Sespie HOST_WIDE_INT high;
5086c87b03e5Sespie int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5087c87b03e5Sespie TREE_INT_CST_HIGH (arg0),
5088c87b03e5Sespie &low, &high);
5089c87b03e5Sespie t = build_int_2 (low, high);
5090c87b03e5Sespie TREE_TYPE (t) = type;
5091c87b03e5Sespie TREE_OVERFLOW (t)
5092c87b03e5Sespie = (TREE_OVERFLOW (arg0)
5093c87b03e5Sespie | force_fit_type (t, overflow));
5094c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t)
5095c87b03e5Sespie = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5096cd3be6e5Savsm /* If arg0 was calculated from sizeof(ptr), record this. */
5097cd3be6e5Savsm if (SIZEOF_PTR_DERIVED (arg0))
5098cd3be6e5Savsm SIZEOF_PTR_DERIVED (t) = 1;
5099c87b03e5Sespie }
5100c87b03e5Sespie }
5101c87b03e5Sespie else if (TREE_CODE (arg0) == REAL_CST)
5102c87b03e5Sespie {
5103c87b03e5Sespie if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5104c87b03e5Sespie t = build_real (type,
5105c87b03e5Sespie REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5106c87b03e5Sespie }
5107c87b03e5Sespie }
5108c87b03e5Sespie else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5109c87b03e5Sespie return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5110c87b03e5Sespie return t;
5111c87b03e5Sespie
5112c87b03e5Sespie case CONJ_EXPR:
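/* Complex conjugation: ~(a + bi) is a - bi.  It is the identity on
   non-complex values, distributes over addition and subtraction, and
   is its own inverse.  */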
5113c87b03e5Sespie if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5114c87b03e5Sespie return convert (type, arg0);
5115c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5116c87b03e5Sespie return build (COMPLEX_EXPR, type,
5117c87b03e5Sespie TREE_OPERAND (arg0, 0),
5118c87b03e5Sespie negate_expr (TREE_OPERAND (arg0, 1)));
5119c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_CST)
5120c87b03e5Sespie return build_complex (type, TREE_REALPART (arg0),
5121c87b03e5Sespie negate_expr (TREE_IMAGPART (arg0)));
5122c87b03e5Sespie else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5123c87b03e5Sespie return fold (build (TREE_CODE (arg0), type,
5124c87b03e5Sespie fold (build1 (CONJ_EXPR, type,
5125c87b03e5Sespie TREE_OPERAND (arg0, 0))),
5126c87b03e5Sespie fold (build1 (CONJ_EXPR,
5127c87b03e5Sespie type, TREE_OPERAND (arg0, 1)))));
5128c87b03e5Sespie else if (TREE_CODE (arg0) == CONJ_EXPR)
5129c87b03e5Sespie return TREE_OPERAND (arg0, 0);
5130c87b03e5Sespie return t;
5131c87b03e5Sespie
5132c87b03e5Sespie case BIT_NOT_EXPR:
5133c87b03e5Sespie if (wins)
5134c87b03e5Sespie {
5135c87b03e5Sespie t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5136c87b03e5Sespie ~ TREE_INT_CST_HIGH (arg0));
5137c87b03e5Sespie TREE_TYPE (t) = type;
5138c87b03e5Sespie force_fit_type (t, 0);
5139c87b03e5Sespie TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5140c87b03e5Sespie TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5141c87b03e5Sespie }
5142c87b03e5Sespie else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5143c87b03e5Sespie return TREE_OPERAND (arg0, 0);
5144c87b03e5Sespie return t;
5145c87b03e5Sespie
5146c87b03e5Sespie case PLUS_EXPR:
5147c87b03e5Sespie /* A + (-B) -> A - B */
5148c87b03e5Sespie if (TREE_CODE (arg1) == NEGATE_EXPR)
5149c87b03e5Sespie return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5150c87b03e5Sespie /* (-A) + B -> B - A */
5151c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR)
5152c87b03e5Sespie return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5153c87b03e5Sespie else if (! FLOAT_TYPE_P (type))
5154c87b03e5Sespie {
5155c87b03e5Sespie if (integer_zerop (arg1))
5156c87b03e5Sespie return non_lvalue (convert (type, arg0));
5157c87b03e5Sespie
5158c87b03e5Sespie /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5159c87b03e5Sespie with a constant, and the two constants have no bits in common,
5160c87b03e5Sespie we should treat this as a BIT_IOR_EXPR since this may produce more
5161c87b03e5Sespie simplifications. */
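/* E.g. (X & 4) + (Y & 3) has no carries, so it equals
   (X & 4) | (Y & 3).  */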
5162c87b03e5Sespie if (TREE_CODE (arg0) == BIT_AND_EXPR
5163c87b03e5Sespie && TREE_CODE (arg1) == BIT_AND_EXPR
5164c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5165c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5166c87b03e5Sespie && integer_zerop (const_binop (BIT_AND_EXPR,
5167c87b03e5Sespie TREE_OPERAND (arg0, 1),
5168c87b03e5Sespie TREE_OPERAND (arg1, 1), 0)))
5169c87b03e5Sespie {
5170c87b03e5Sespie code = BIT_IOR_EXPR;
5171c87b03e5Sespie goto bit_ior;
5172c87b03e5Sespie }
5173c87b03e5Sespie
5174c87b03e5Sespie /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5175c87b03e5Sespie (plus (plus (mult) (mult)) (foo)) so that we can
5176c87b03e5Sespie take advantage of the factoring cases below. */
5177c87b03e5Sespie if ((TREE_CODE (arg0) == PLUS_EXPR
5178c87b03e5Sespie && TREE_CODE (arg1) == MULT_EXPR)
5179c87b03e5Sespie || (TREE_CODE (arg1) == PLUS_EXPR
5180c87b03e5Sespie && TREE_CODE (arg0) == MULT_EXPR))
5181c87b03e5Sespie {
5182c87b03e5Sespie tree parg0, parg1, parg, marg;
5183c87b03e5Sespie
5184c87b03e5Sespie if (TREE_CODE (arg0) == PLUS_EXPR)
5185c87b03e5Sespie parg = arg0, marg = arg1;
5186c87b03e5Sespie else
5187c87b03e5Sespie parg = arg1, marg = arg0;
5188c87b03e5Sespie parg0 = TREE_OPERAND (parg, 0);
5189c87b03e5Sespie parg1 = TREE_OPERAND (parg, 1);
5190c87b03e5Sespie STRIP_NOPS (parg0);
5191c87b03e5Sespie STRIP_NOPS (parg1);
5192c87b03e5Sespie
5193c87b03e5Sespie if (TREE_CODE (parg0) == MULT_EXPR
5194c87b03e5Sespie && TREE_CODE (parg1) != MULT_EXPR)
5195c87b03e5Sespie return fold (build (PLUS_EXPR, type,
5196c87b03e5Sespie fold (build (PLUS_EXPR, type, parg0, marg)),
5197c87b03e5Sespie parg1));
5198c87b03e5Sespie if (TREE_CODE (parg0) != MULT_EXPR
5199c87b03e5Sespie && TREE_CODE (parg1) == MULT_EXPR)
5200c87b03e5Sespie return fold (build (PLUS_EXPR, type,
5201c87b03e5Sespie fold (build (PLUS_EXPR, type, parg1, marg)),
5202c87b03e5Sespie parg0));
5203c87b03e5Sespie }
5204c87b03e5Sespie
5205c87b03e5Sespie if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5206c87b03e5Sespie {
5207c87b03e5Sespie tree arg00, arg01, arg10, arg11;
5208c87b03e5Sespie tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5209c87b03e5Sespie
5210c87b03e5Sespie /* (A * C) + (B * C) -> (A+B) * C.
5211c87b03e5Sespie We are most concerned about the case where C is a constant,
5212c87b03e5Sespie but other combinations show up during loop reduction. Since
5213c87b03e5Sespie it is not difficult, try all four possibilities. */
5214c87b03e5Sespie
5215c87b03e5Sespie arg00 = TREE_OPERAND (arg0, 0);
5216c87b03e5Sespie arg01 = TREE_OPERAND (arg0, 1);
5217c87b03e5Sespie arg10 = TREE_OPERAND (arg1, 0);
5218c87b03e5Sespie arg11 = TREE_OPERAND (arg1, 1);
5219c87b03e5Sespie same = NULL_TREE;
5220c87b03e5Sespie
5221c87b03e5Sespie if (operand_equal_p (arg01, arg11, 0))
5222c87b03e5Sespie same = arg01, alt0 = arg00, alt1 = arg10;
5223c87b03e5Sespie else if (operand_equal_p (arg00, arg10, 0))
5224c87b03e5Sespie same = arg00, alt0 = arg01, alt1 = arg11;
5225c87b03e5Sespie else if (operand_equal_p (arg00, arg11, 0))
5226c87b03e5Sespie same = arg00, alt0 = arg01, alt1 = arg10;
5227c87b03e5Sespie else if (operand_equal_p (arg01, arg10, 0))
5228c87b03e5Sespie same = arg01, alt0 = arg00, alt1 = arg11;
5229c87b03e5Sespie
5230c87b03e5Sespie /* No identical multiplicands; see if we can find a common
5231c87b03e5Sespie power-of-two factor in non-power-of-two multiplies. This
5232c87b03e5Sespie can help in multi-dimensional array access. */
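/* For instance, i*12 + j*4 becomes (i*3 + j) * 4.  */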
5233c87b03e5Sespie else if (TREE_CODE (arg01) == INTEGER_CST
5234c87b03e5Sespie && TREE_CODE (arg11) == INTEGER_CST
5235c87b03e5Sespie && TREE_INT_CST_HIGH (arg01) == 0
5236c87b03e5Sespie && TREE_INT_CST_HIGH (arg11) == 0)
5237c87b03e5Sespie {
5238c87b03e5Sespie HOST_WIDE_INT int01, int11, tmp;
5239c87b03e5Sespie int01 = TREE_INT_CST_LOW (arg01);
5240c87b03e5Sespie int11 = TREE_INT_CST_LOW (arg11);
5241c87b03e5Sespie
5242c87b03e5Sespie /* Move min of absolute values to int11. */
5243c87b03e5Sespie if ((int01 >= 0 ? int01 : -int01)
5244c87b03e5Sespie < (int11 >= 0 ? int11 : -int11))
5245c87b03e5Sespie {
5246c87b03e5Sespie tmp = int01, int01 = int11, int11 = tmp;
5247c87b03e5Sespie alt0 = arg00, arg00 = arg10, arg10 = alt0;
5248c87b03e5Sespie alt0 = arg01, arg01 = arg11, arg11 = alt0;
5249c87b03e5Sespie }
5250c87b03e5Sespie
5251c87b03e5Sespie if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5252c87b03e5Sespie {
5253c87b03e5Sespie alt0 = fold (build (MULT_EXPR, type, arg00,
5254c87b03e5Sespie build_int_2 (int01 / int11, 0)));
5255c87b03e5Sespie alt1 = arg10;
5256c87b03e5Sespie same = arg11;
5257c87b03e5Sespie }
5258c87b03e5Sespie }
5259c87b03e5Sespie
5260c87b03e5Sespie if (same)
5261c87b03e5Sespie return fold (build (MULT_EXPR, type,
5262c87b03e5Sespie fold (build (PLUS_EXPR, type, alt0, alt1)),
5263c87b03e5Sespie same));
5264c87b03e5Sespie }
5265c87b03e5Sespie }
5266c87b03e5Sespie
5267c87b03e5Sespie /* See if ARG1 is zero and X + ARG1 reduces to X. */
5268c87b03e5Sespie else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5269c87b03e5Sespie return non_lvalue (convert (type, arg0));
5270c87b03e5Sespie
5271c87b03e5Sespie /* Likewise if the operands are reversed. */
5272c87b03e5Sespie else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5273c87b03e5Sespie return non_lvalue (convert (type, arg1));
5274c87b03e5Sespie
5275c87b03e5Sespie bit_rotate:
5276c87b03e5Sespie /* If A is unsigned and C1 + C2 is the width of A, then
5277c87b03e5Sespie (A << C1) + (A >> C2) is a rotate of A by C1 bits. */
5278c87b03e5Sespie /* Likewise, if A is unsigned and Z is the width of A, then
5279c87b03e5Sespie (A << B) + (A >> (Z - B)) is a rotate of A by B bits. */
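/* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) folds to a
   left rotate of A by 3 bits.  */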
5280c87b03e5Sespie {
5281c87b03e5Sespie enum tree_code code0, code1;
5282c87b03e5Sespie code0 = TREE_CODE (arg0);
5283c87b03e5Sespie code1 = TREE_CODE (arg1);
5284c87b03e5Sespie if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5285c87b03e5Sespie || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5286c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 0),
5287c87b03e5Sespie TREE_OPERAND (arg1, 0), 0)
5288c87b03e5Sespie && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5289c87b03e5Sespie {
5290c87b03e5Sespie tree tree01, tree11;
5291c87b03e5Sespie enum tree_code code01, code11;
5292c87b03e5Sespie
5293c87b03e5Sespie tree01 = TREE_OPERAND (arg0, 1);
5294c87b03e5Sespie tree11 = TREE_OPERAND (arg1, 1);
5295c87b03e5Sespie STRIP_NOPS (tree01);
5296c87b03e5Sespie STRIP_NOPS (tree11);
5297c87b03e5Sespie code01 = TREE_CODE (tree01);
5298c87b03e5Sespie code11 = TREE_CODE (tree11);
5299c87b03e5Sespie if (code01 == INTEGER_CST
5300c87b03e5Sespie && code11 == INTEGER_CST
5301c87b03e5Sespie && TREE_INT_CST_HIGH (tree01) == 0
5302c87b03e5Sespie && TREE_INT_CST_HIGH (tree11) == 0
5303c87b03e5Sespie && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5304c87b03e5Sespie == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5305c87b03e5Sespie return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5306c87b03e5Sespie code0 == LSHIFT_EXPR ? tree01 : tree11);
5307c87b03e5Sespie else if (code11 == MINUS_EXPR)
5308c87b03e5Sespie {
5309c87b03e5Sespie tree tree110, tree111;
5310c87b03e5Sespie tree110 = TREE_OPERAND (tree11, 0);
5311c87b03e5Sespie tree111 = TREE_OPERAND (tree11, 1);
5312c87b03e5Sespie STRIP_NOPS (tree110);
5313c87b03e5Sespie STRIP_NOPS (tree111);
5314c87b03e5Sespie if (TREE_CODE (tree110) == INTEGER_CST
5315c87b03e5Sespie && 0 == compare_tree_int (tree110,
5316c87b03e5Sespie TYPE_PRECISION
5317c87b03e5Sespie (TREE_TYPE (TREE_OPERAND
5318c87b03e5Sespie (arg0, 0))))
5319c87b03e5Sespie && operand_equal_p (tree01, tree111, 0))
5320c87b03e5Sespie return build ((code0 == LSHIFT_EXPR
5321c87b03e5Sespie ? LROTATE_EXPR
5322c87b03e5Sespie : RROTATE_EXPR),
5323c87b03e5Sespie type, TREE_OPERAND (arg0, 0), tree01);
5324c87b03e5Sespie }
5325c87b03e5Sespie else if (code01 == MINUS_EXPR)
5326c87b03e5Sespie {
5327c87b03e5Sespie tree tree010, tree011;
5328c87b03e5Sespie tree010 = TREE_OPERAND (tree01, 0);
5329c87b03e5Sespie tree011 = TREE_OPERAND (tree01, 1);
5330c87b03e5Sespie STRIP_NOPS (tree010);
5331c87b03e5Sespie STRIP_NOPS (tree011);
5332c87b03e5Sespie if (TREE_CODE (tree010) == INTEGER_CST
5333c87b03e5Sespie && 0 == compare_tree_int (tree010,
5334c87b03e5Sespie TYPE_PRECISION
5335c87b03e5Sespie (TREE_TYPE (TREE_OPERAND
5336c87b03e5Sespie (arg0, 0))))
5337c87b03e5Sespie && operand_equal_p (tree11, tree011, 0))
5338c87b03e5Sespie return build ((code0 != LSHIFT_EXPR
5339c87b03e5Sespie ? LROTATE_EXPR
5340c87b03e5Sespie : RROTATE_EXPR),
5341c87b03e5Sespie type, TREE_OPERAND (arg0, 0), tree11);
5342c87b03e5Sespie }
5343c87b03e5Sespie }
5344c87b03e5Sespie }
5345c87b03e5Sespie
5346c87b03e5Sespie associate:
5347c87b03e5Sespie /* In most languages, we can't reassociate operations on floats
5348c87b03e5Sespie across parentheses. Rather than remember where the parentheses were, we
5349c87b03e5Sespie don't associate floats at all. It shouldn't matter much. However,
5350c87b03e5Sespie associating multiplications is only very slightly inaccurate, so do
5351c87b03e5Sespie that if -funsafe-math-optimizations is specified. */
5352c87b03e5Sespie
5353c87b03e5Sespie if (! wins
5354c87b03e5Sespie && (! FLOAT_TYPE_P (type)
5355c87b03e5Sespie || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5356c87b03e5Sespie {
5357c87b03e5Sespie tree var0, con0, lit0, minus_lit0;
5358c87b03e5Sespie tree var1, con1, lit1, minus_lit1;
5359c87b03e5Sespie
5360c87b03e5Sespie /* Split both trees into variables, constants, and literals. Then
5361c87b03e5Sespie associate each group together, the constants with literals,
5362c87b03e5Sespie then the result with variables. This increases the chances of
5363c87b03e5Sespie literals being recombined later and of generating relocatable
5364c87b03e5Sespie expressions for the sum of a constant and literal. */
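/* For instance, (X + 1) + (Y + 2) regroups to (X + Y) + 3.  */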
5365c87b03e5Sespie var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5366c87b03e5Sespie var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5367c87b03e5Sespie code == MINUS_EXPR);
5368c87b03e5Sespie
5369c87b03e5Sespie /* Only do something if we found more than two objects. Otherwise,
5370c87b03e5Sespie nothing has changed and we risk infinite recursion. */
5371c87b03e5Sespie if (2 < ((var0 != 0) + (var1 != 0)
5372c87b03e5Sespie + (con0 != 0) + (con1 != 0)
5373c87b03e5Sespie + (lit0 != 0) + (lit1 != 0)
5374c87b03e5Sespie + (minus_lit0 != 0) + (minus_lit1 != 0)))
5375c87b03e5Sespie {
5376c87b03e5Sespie /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5377c87b03e5Sespie if (code == MINUS_EXPR)
5378c87b03e5Sespie code = PLUS_EXPR;
5379c87b03e5Sespie
5380c87b03e5Sespie var0 = associate_trees (var0, var1, code, type);
5381c87b03e5Sespie con0 = associate_trees (con0, con1, code, type);
5382c87b03e5Sespie lit0 = associate_trees (lit0, lit1, code, type);
5383c87b03e5Sespie minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5384c87b03e5Sespie
5385c87b03e5Sespie /* Preserve the MINUS_EXPR if the negative part of the literal is
5386c87b03e5Sespie greater than the positive part. Otherwise, the multiplicative
5387c87b03e5Sespie folding code (i.e. extract_muldiv) may be fooled when
5388c87b03e5Sespie unsigned constants are subtracted, as in the following
5389c87b03e5Sespie example: ((X*2 + 4) - 8U)/2. */
5390c87b03e5Sespie if (minus_lit0 && lit0)
5391c87b03e5Sespie {
5392c87b03e5Sespie if (tree_int_cst_lt (lit0, minus_lit0))
5393c87b03e5Sespie {
5394c87b03e5Sespie minus_lit0 = associate_trees (minus_lit0, lit0,
5395c87b03e5Sespie MINUS_EXPR, type);
5396c87b03e5Sespie lit0 = 0;
5397c87b03e5Sespie }
5398c87b03e5Sespie else
5399c87b03e5Sespie {
5400c87b03e5Sespie lit0 = associate_trees (lit0, minus_lit0,
5401c87b03e5Sespie MINUS_EXPR, type);
5402c87b03e5Sespie minus_lit0 = 0;
5403c87b03e5Sespie }
5404c87b03e5Sespie }
5405c87b03e5Sespie if (minus_lit0)
5406c87b03e5Sespie {
5407c87b03e5Sespie if (con0 == 0)
5408c87b03e5Sespie return convert (type, associate_trees (var0, minus_lit0,
5409c87b03e5Sespie MINUS_EXPR, type));
5410c87b03e5Sespie else
5411c87b03e5Sespie {
5412c87b03e5Sespie con0 = associate_trees (con0, minus_lit0,
5413c87b03e5Sespie MINUS_EXPR, type);
5414c87b03e5Sespie return convert (type, associate_trees (var0, con0,
5415c87b03e5Sespie PLUS_EXPR, type));
5416c87b03e5Sespie }
5417c87b03e5Sespie }
5418c87b03e5Sespie
5419c87b03e5Sespie con0 = associate_trees (con0, lit0, code, type);
5420c87b03e5Sespie return convert (type, associate_trees (var0, con0, code, type));
5421c87b03e5Sespie }
5422c87b03e5Sespie }
5423c87b03e5Sespie
5424c87b03e5Sespie binary:
5425c87b03e5Sespie if (wins)
5426c87b03e5Sespie t1 = const_binop (code, arg0, arg1, 0);
5427c87b03e5Sespie if (t1 != NULL_TREE)
5428c87b03e5Sespie {
5429c87b03e5Sespie /* The return value should always have
5430c87b03e5Sespie the same type as the original expression. */
5431c87b03e5Sespie if (TREE_TYPE (t1) != TREE_TYPE (t))
5432c87b03e5Sespie t1 = convert (TREE_TYPE (t), t1);
5433c87b03e5Sespie
5434c87b03e5Sespie return t1;
5435c87b03e5Sespie }
5436c87b03e5Sespie return t;
5437c87b03e5Sespie
5438c87b03e5Sespie case MINUS_EXPR:
5439c87b03e5Sespie /* A - (-B) -> A + B */
5440c87b03e5Sespie if (TREE_CODE (arg1) == NEGATE_EXPR)
5441c87b03e5Sespie return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5442c87b03e5Sespie /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5443c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5444c87b03e5Sespie return
5445c87b03e5Sespie fold (build (MINUS_EXPR, type,
5446c87b03e5Sespie build_real (TREE_TYPE (arg1),
5447c87b03e5Sespie REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5448c87b03e5Sespie TREE_OPERAND (arg0, 0)));
5449c87b03e5Sespie
5450c87b03e5Sespie if (! FLOAT_TYPE_P (type))
5451c87b03e5Sespie {
5452c87b03e5Sespie if (! wins && integer_zerop (arg0))
5453c87b03e5Sespie return negate_expr (convert (type, arg1));
5454c87b03e5Sespie if (integer_zerop (arg1))
5455c87b03e5Sespie return non_lvalue (convert (type, arg0));
5456c87b03e5Sespie
5457c87b03e5Sespie /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5458c87b03e5Sespie about the case where C is a constant, just try one of the
5459c87b03e5Sespie four possibilities. */
5460c87b03e5Sespie
5461c87b03e5Sespie if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5462c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
5463c87b03e5Sespie TREE_OPERAND (arg1, 1), 0))
5464c87b03e5Sespie return fold (build (MULT_EXPR, type,
5465c87b03e5Sespie fold (build (MINUS_EXPR, type,
5466c87b03e5Sespie TREE_OPERAND (arg0, 0),
5467c87b03e5Sespie TREE_OPERAND (arg1, 0))),
5468c87b03e5Sespie TREE_OPERAND (arg0, 1)));
5469c87b03e5Sespie }
5470c87b03e5Sespie
5471c87b03e5Sespie /* See if ARG1 is zero and X - ARG1 reduces to X. */
5472c87b03e5Sespie else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5473c87b03e5Sespie return non_lvalue (convert (type, arg0));
5474c87b03e5Sespie
5475c87b03e5Sespie /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5476c87b03e5Sespie ARG0 is zero and X + ARG0 reduces to X, since that would mean
5477c87b03e5Sespie (-ARG1 + ARG0) reduces to -ARG1. */
5478c87b03e5Sespie else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5479c87b03e5Sespie return negate_expr (convert (type, arg1));
5480c87b03e5Sespie
5481c87b03e5Sespie /* Fold &x - &x. This can happen from &x.foo - &x.
5482c87b03e5Sespie This is unsafe for certain floats even in non-IEEE formats.
5483c87b03e5Sespie In IEEE, it is unsafe because it does wrong for NaNs.
5484c87b03e5Sespie Also note that operand_equal_p is always false if an operand
5485c87b03e5Sespie is volatile. */
5486c87b03e5Sespie
5487c87b03e5Sespie if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5488c87b03e5Sespie && operand_equal_p (arg0, arg1, 0))
5489c87b03e5Sespie return convert (type, integer_zero_node);
5490c87b03e5Sespie
5491c87b03e5Sespie goto associate;
5492c87b03e5Sespie
5493c87b03e5Sespie case MULT_EXPR:
5494c87b03e5Sespie /* (-A) * (-B) -> A * B */
5495c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5496c87b03e5Sespie return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5497c87b03e5Sespie TREE_OPERAND (arg1, 0)));
5498c87b03e5Sespie
5499c87b03e5Sespie if (! FLOAT_TYPE_P (type))
5500c87b03e5Sespie {
5501c87b03e5Sespie if (integer_zerop (arg1))
5502c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5503c87b03e5Sespie if (integer_onep (arg1))
5504c87b03e5Sespie return non_lvalue (convert (type, arg0));
5505c87b03e5Sespie
5506c87b03e5Sespie /* (a * (1 << b)) is (a << b) */
5507c87b03e5Sespie if (TREE_CODE (arg1) == LSHIFT_EXPR
5508c87b03e5Sespie && integer_onep (TREE_OPERAND (arg1, 0)))
5509c87b03e5Sespie return fold (build (LSHIFT_EXPR, type, arg0,
5510c87b03e5Sespie TREE_OPERAND (arg1, 1)));
5511c87b03e5Sespie if (TREE_CODE (arg0) == LSHIFT_EXPR
5512c87b03e5Sespie && integer_onep (TREE_OPERAND (arg0, 0)))
5513c87b03e5Sespie return fold (build (LSHIFT_EXPR, type, arg1,
5514c87b03e5Sespie TREE_OPERAND (arg0, 1)));
5515c87b03e5Sespie
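/* Let extract_muldiv try to distribute the multiplication by a
   constant into the operand, e.g. turning (X + 4) * 2 into X*2 + 8
   when it can show that is safe.  */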
5516c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST
5517c87b03e5Sespie && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5518c87b03e5Sespie code, NULL_TREE)))
5519c87b03e5Sespie return convert (type, tem);
5520c87b03e5Sespie
5521c87b03e5Sespie }
5522c87b03e5Sespie else
5523c87b03e5Sespie {
5524c87b03e5Sespie /* Maybe fold x * 0 to 0. The expressions aren't the same
5525c87b03e5Sespie when x is NaN, since x * 0 is also NaN. Nor are they the
5526c87b03e5Sespie same in modes with signed zeros, since multiplying a
5527c87b03e5Sespie negative value by 0 gives -0, not +0. */
5528c87b03e5Sespie if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5529c87b03e5Sespie && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5530c87b03e5Sespie && real_zerop (arg1))
5531c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5532c87b03e5Sespie /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5533c87b03e5Sespie if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5534c87b03e5Sespie && real_onep (arg1))
5535c87b03e5Sespie return non_lvalue (convert (type, arg0));
5536c87b03e5Sespie
5537c87b03e5Sespie /* Transform x * -1.0 into -x. */
5538c87b03e5Sespie if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5539c87b03e5Sespie && real_minus_onep (arg1))
5540c87b03e5Sespie return fold (build1 (NEGATE_EXPR, type, arg0));
5541c87b03e5Sespie
5542c87b03e5Sespie /* x*2 is x+x */
5543c87b03e5Sespie if (! wins && real_twop (arg1)
5544c87b03e5Sespie && (*lang_hooks.decls.global_bindings_p) () == 0
5545c87b03e5Sespie && ! contains_placeholder_p (arg0))
5546c87b03e5Sespie {
5547c87b03e5Sespie tree arg = save_expr (arg0);
5548c87b03e5Sespie return build (PLUS_EXPR, type, arg, arg);
5549c87b03e5Sespie }
5550c87b03e5Sespie }
5551c87b03e5Sespie goto associate;
5552c87b03e5Sespie
5553c87b03e5Sespie case BIT_IOR_EXPR:
5554c87b03e5Sespie bit_ior:
5555c87b03e5Sespie if (integer_all_onesp (arg1))
5556c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5557c87b03e5Sespie if (integer_zerop (arg1))
5558c87b03e5Sespie return non_lvalue (convert (type, arg0));
5559c87b03e5Sespie t1 = distribute_bit_expr (code, type, arg0, arg1);
5560c87b03e5Sespie if (t1 != NULL_TREE)
5561c87b03e5Sespie return t1;
5562c87b03e5Sespie
5563c87b03e5Sespie /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5564c87b03e5Sespie
5565c87b03e5Sespie This results in more efficient code for machines without a NAND
5566c87b03e5Sespie instruction. Combine will canonicalize to the first form
5567c87b03e5Sespie which will allow use of NAND instructions provided by the
5568c87b03e5Sespie backend if they exist. */
5569c87b03e5Sespie if (TREE_CODE (arg0) == BIT_NOT_EXPR
5570c87b03e5Sespie && TREE_CODE (arg1) == BIT_NOT_EXPR)
5571c87b03e5Sespie {
5572c87b03e5Sespie return fold (build1 (BIT_NOT_EXPR, type,
5573c87b03e5Sespie build (BIT_AND_EXPR, type,
5574c87b03e5Sespie TREE_OPERAND (arg0, 0),
5575c87b03e5Sespie TREE_OPERAND (arg1, 0))));
5576c87b03e5Sespie }
5577c87b03e5Sespie
5578c87b03e5Sespie /* See if this can be simplified into a rotate first. If that
5579c87b03e5Sespie is unsuccessful continue in the association code. */
5580c87b03e5Sespie goto bit_rotate;
5581c87b03e5Sespie
5582c87b03e5Sespie case BIT_XOR_EXPR:
5583c87b03e5Sespie if (integer_zerop (arg1))
5584c87b03e5Sespie return non_lvalue (convert (type, arg0));
5585c87b03e5Sespie if (integer_all_onesp (arg1))
5586c87b03e5Sespie return fold (build1 (BIT_NOT_EXPR, type, arg0));
5587c87b03e5Sespie
5588c87b03e5Sespie /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5589c87b03e5Sespie with a constant, and the two constants have no bits in common,
5590c87b03e5Sespie we should treat this as a BIT_IOR_EXPR since this may produce more
5591c87b03e5Sespie simplifications. */
5592c87b03e5Sespie if (TREE_CODE (arg0) == BIT_AND_EXPR
5593c87b03e5Sespie && TREE_CODE (arg1) == BIT_AND_EXPR
5594c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5595c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5596c87b03e5Sespie && integer_zerop (const_binop (BIT_AND_EXPR,
5597c87b03e5Sespie TREE_OPERAND (arg0, 1),
5598c87b03e5Sespie TREE_OPERAND (arg1, 1), 0)))
5599c87b03e5Sespie {
5600c87b03e5Sespie code = BIT_IOR_EXPR;
5601c87b03e5Sespie goto bit_ior;
5602c87b03e5Sespie }
5603c87b03e5Sespie
5604c87b03e5Sespie /* See if this can be simplified into a rotate first. If that
5605c87b03e5Sespie is unsuccessful continue in the association code. */
5606c87b03e5Sespie goto bit_rotate;
5607c87b03e5Sespie
5608c87b03e5Sespie case BIT_AND_EXPR:
5609c87b03e5Sespie bit_and:
5610c87b03e5Sespie if (integer_all_onesp (arg1))
5611c87b03e5Sespie return non_lvalue (convert (type, arg0));
5612c87b03e5Sespie if (integer_zerop (arg1))
5613c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5614c87b03e5Sespie t1 = distribute_bit_expr (code, type, arg0, arg1);
5615c87b03e5Sespie if (t1 != NULL_TREE)
5616c87b03e5Sespie return t1;
5617c87b03e5Sespie /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
5618c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5619c87b03e5Sespie && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5620c87b03e5Sespie {
5621c87b03e5Sespie unsigned int prec
5622c87b03e5Sespie = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5623c87b03e5Sespie
5624c87b03e5Sespie if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5625c87b03e5Sespie && (~TREE_INT_CST_LOW (arg1)
5626c87b03e5Sespie & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5627c87b03e5Sespie return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5628c87b03e5Sespie }
5629c87b03e5Sespie
5630c87b03e5Sespie /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5631c87b03e5Sespie
5632c87b03e5Sespie This results in more efficient code for machines without a NOR
5633c87b03e5Sespie instruction. Combine will canonicalize to the first form
5634c87b03e5Sespie which will allow use of NOR instructions provided by the
5635c87b03e5Sespie backend if they exist. */
5636c87b03e5Sespie if (TREE_CODE (arg0) == BIT_NOT_EXPR
5637c87b03e5Sespie && TREE_CODE (arg1) == BIT_NOT_EXPR)
5638c87b03e5Sespie {
5639c87b03e5Sespie return fold (build1 (BIT_NOT_EXPR, type,
5640c87b03e5Sespie build (BIT_IOR_EXPR, type,
5641c87b03e5Sespie TREE_OPERAND (arg0, 0),
5642c87b03e5Sespie TREE_OPERAND (arg1, 0))));
5643c87b03e5Sespie }
5644c87b03e5Sespie
5645c87b03e5Sespie goto associate;
5646c87b03e5Sespie
5647c87b03e5Sespie case BIT_ANDTC_EXPR:
5648c87b03e5Sespie if (integer_all_onesp (arg0))
5649c87b03e5Sespie return non_lvalue (convert (type, arg1));
5650c87b03e5Sespie if (integer_zerop (arg0))
5651c87b03e5Sespie return omit_one_operand (type, arg0, arg1);
5652c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST)
5653c87b03e5Sespie {
5654c87b03e5Sespie arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5655c87b03e5Sespie code = BIT_AND_EXPR;
5656c87b03e5Sespie goto bit_and;
5657c87b03e5Sespie }
5658c87b03e5Sespie goto binary;
5659c87b03e5Sespie
5660c87b03e5Sespie case RDIV_EXPR:
5661c87b03e5Sespie /* Don't touch a floating-point divide by zero unless the mode
5662c87b03e5Sespie of the constant can represent infinity. */
5663c87b03e5Sespie if (TREE_CODE (arg1) == REAL_CST
5664c87b03e5Sespie && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5665c87b03e5Sespie && real_zerop (arg1))
5666c87b03e5Sespie return t;
5667c87b03e5Sespie
5668c87b03e5Sespie /* (-A) / (-B) -> A / B */
5669c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5670c87b03e5Sespie return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5671c87b03e5Sespie TREE_OPERAND (arg1, 0)));
5672c87b03e5Sespie
5673c87b03e5Sespie /* In IEEE floating point, x/1 is not equivalent to x for snans. */
5674c87b03e5Sespie if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5675c87b03e5Sespie && real_onep (arg1))
5676c87b03e5Sespie return non_lvalue (convert (type, arg0));
5677c87b03e5Sespie
5678c87b03e5Sespie /* If ARG1 is a constant, we can convert this to a multiply by the
5679c87b03e5Sespie reciprocal. This does not have the same rounding properties,
5680c87b03e5Sespie so only do this if -funsafe-math-optimizations. We can actually
5681c87b03e5Sespie always safely do it if ARG1 is a power of two, but it's hard to
5682c87b03e5Sespie tell if it is or not in a portable manner. */
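/* E.g. X / 2.0 can become X * 0.5.  */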
5683c87b03e5Sespie if (TREE_CODE (arg1) == REAL_CST)
5684c87b03e5Sespie {
5685c87b03e5Sespie if (flag_unsafe_math_optimizations
5686c87b03e5Sespie && 0 != (tem = const_binop (code, build_real (type, dconst1),
5687c87b03e5Sespie arg1, 0)))
5688c87b03e5Sespie return fold (build (MULT_EXPR, type, arg0, tem));
5689c87b03e5Sespie /* Find the reciprocal if optimizing and the result is exact. */
5690c87b03e5Sespie else if (optimize)
5691c87b03e5Sespie {
5692c87b03e5Sespie REAL_VALUE_TYPE r;
5693c87b03e5Sespie r = TREE_REAL_CST (arg1);
5694c87b03e5Sespie if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5695c87b03e5Sespie {
5696c87b03e5Sespie tem = build_real (type, r);
5697c87b03e5Sespie return fold (build (MULT_EXPR, type, arg0, tem));
5698c87b03e5Sespie }
5699c87b03e5Sespie }
5700c87b03e5Sespie }
5701c87b03e5Sespie /* Convert A/B/C to A/(B*C). */
5702c87b03e5Sespie if (flag_unsafe_math_optimizations
5703c87b03e5Sespie && TREE_CODE (arg0) == RDIV_EXPR)
5704c87b03e5Sespie {
5705c87b03e5Sespie return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5706c87b03e5Sespie build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5707c87b03e5Sespie arg1)));
5708c87b03e5Sespie }
5709c87b03e5Sespie /* Convert A/(B/C) to (A/B)*C. */
5710c87b03e5Sespie if (flag_unsafe_math_optimizations
5711c87b03e5Sespie && TREE_CODE (arg1) == RDIV_EXPR)
5712c87b03e5Sespie {
5713c87b03e5Sespie return fold (build (MULT_EXPR, type,
5714c87b03e5Sespie build (RDIV_EXPR, type, arg0,
5715c87b03e5Sespie TREE_OPERAND (arg1, 0)),
5716c87b03e5Sespie TREE_OPERAND (arg1, 1)));
5717c87b03e5Sespie }
5718c87b03e5Sespie goto binary;
5719c87b03e5Sespie
5720c87b03e5Sespie case TRUNC_DIV_EXPR:
5721c87b03e5Sespie case ROUND_DIV_EXPR:
5722c87b03e5Sespie case FLOOR_DIV_EXPR:
5723c87b03e5Sespie case CEIL_DIV_EXPR:
5724c87b03e5Sespie case EXACT_DIV_EXPR:
5725c87b03e5Sespie if (integer_onep (arg1))
5726c87b03e5Sespie return non_lvalue (convert (type, arg0));
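/* A division by zero is left untouched; there is nothing meaningful
   to fold it to.  */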
5727c87b03e5Sespie if (integer_zerop (arg1))
5728c87b03e5Sespie return t;
5729c87b03e5Sespie
5730c87b03e5Sespie /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5731c87b03e5Sespie operation, EXACT_DIV_EXPR.
5732c87b03e5Sespie
5733c87b03e5Sespie Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5734c87b03e5Sespie At one time the others generated faster code, but it's not clear whether
5735c87b03e5Sespie they still do after the last round of changes to the DIV code in expmed.c. */
5736c87b03e5Sespie if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5737c87b03e5Sespie && multiple_of_p (type, arg0, arg1))
5738c87b03e5Sespie return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5739c87b03e5Sespie
5740c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST
5741c87b03e5Sespie && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5742c87b03e5Sespie code, NULL_TREE)))
5743c87b03e5Sespie return convert (type, tem);
5744c87b03e5Sespie
5745c87b03e5Sespie goto binary;
5746c87b03e5Sespie
5747c87b03e5Sespie case CEIL_MOD_EXPR:
5748c87b03e5Sespie case FLOOR_MOD_EXPR:
5749c87b03e5Sespie case ROUND_MOD_EXPR:
5750c87b03e5Sespie case TRUNC_MOD_EXPR:
5751c87b03e5Sespie if (integer_onep (arg1))
5752c87b03e5Sespie return omit_one_operand (type, integer_zero_node, arg0);
5753c87b03e5Sespie if (integer_zerop (arg1))
5754c87b03e5Sespie return t;
5755c87b03e5Sespie
5756c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST
5757c87b03e5Sespie && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5758c87b03e5Sespie code, NULL_TREE)))
5759c87b03e5Sespie return convert (type, tem);
5760c87b03e5Sespie
5761c87b03e5Sespie goto binary;
5762c87b03e5Sespie
5763c87b03e5Sespie case LSHIFT_EXPR:
5764c87b03e5Sespie case RSHIFT_EXPR:
5765c87b03e5Sespie case LROTATE_EXPR:
5766c87b03e5Sespie case RROTATE_EXPR:
5767c87b03e5Sespie if (integer_zerop (arg1))
5768c87b03e5Sespie return non_lvalue (convert (type, arg0));
5769c87b03e5Sespie /* Since a negative shift count is not well-defined,
5770c87b03e5Sespie don't try to compute it in the compiler. */
5771c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5772c87b03e5Sespie return t;
5773c87b03e5Sespie /* Rewrite an LROTATE_EXPR by a constant into an
5774c87b03e5Sespie RROTATE_EXPR by a new constant. */
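/* A left rotate by N bits is the same as a right rotate by
   (width - N) bits.  */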
5775c87b03e5Sespie if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5776c87b03e5Sespie {
5777c87b03e5Sespie TREE_SET_CODE (t, RROTATE_EXPR);
5778c87b03e5Sespie code = RROTATE_EXPR;
5779c87b03e5Sespie TREE_OPERAND (t, 1) = arg1
5780c87b03e5Sespie = const_binop
5781c87b03e5Sespie (MINUS_EXPR,
5782c87b03e5Sespie convert (TREE_TYPE (arg1),
5783c87b03e5Sespie build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5784c87b03e5Sespie arg1, 0);
5785c87b03e5Sespie if (tree_int_cst_sgn (arg1) < 0)
5786c87b03e5Sespie return t;
5787c87b03e5Sespie }
5788c87b03e5Sespie
5789c87b03e5Sespie /* If we have a rotate of a bit operation with the rotate count and
5790c87b03e5Sespie the second operand of the bit operation both constant,
5791c87b03e5Sespie permute the two operations. */
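/* (X & C) rotated by N equals (X rotated by N) & (C rotated by N),
   and likewise for IOR and XOR.  */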
5792c87b03e5Sespie if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5793c87b03e5Sespie && (TREE_CODE (arg0) == BIT_AND_EXPR
5794c87b03e5Sespie || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5795c87b03e5Sespie || TREE_CODE (arg0) == BIT_IOR_EXPR
5796c87b03e5Sespie || TREE_CODE (arg0) == BIT_XOR_EXPR)
5797c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5798c87b03e5Sespie return fold (build (TREE_CODE (arg0), type,
5799c87b03e5Sespie fold (build (code, type,
5800c87b03e5Sespie TREE_OPERAND (arg0, 0), arg1)),
5801c87b03e5Sespie fold (build (code, type,
5802c87b03e5Sespie TREE_OPERAND (arg0, 1), arg1))));
5803c87b03e5Sespie
5804c87b03e5Sespie /* Two consecutive rotates adding up to the width of the mode can
5805c87b03e5Sespie be ignored. */
5806c87b03e5Sespie if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5807c87b03e5Sespie && TREE_CODE (arg0) == RROTATE_EXPR
5808c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5809c87b03e5Sespie && TREE_INT_CST_HIGH (arg1) == 0
5810c87b03e5Sespie && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5811c87b03e5Sespie && ((TREE_INT_CST_LOW (arg1)
5812c87b03e5Sespie + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5813c87b03e5Sespie == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5814c87b03e5Sespie return TREE_OPERAND (arg0, 0);
5815c87b03e5Sespie
5816c87b03e5Sespie goto binary;
5817c87b03e5Sespie
5818c87b03e5Sespie case MIN_EXPR:
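/* MIN (X, X) is X, and MIN (X, the minimum value of the type) is
   that minimum; omit_one_operand preserves any side effects of the
   dropped operand.  */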
5819c87b03e5Sespie if (operand_equal_p (arg0, arg1, 0))
5820c87b03e5Sespie return omit_one_operand (type, arg0, arg1);
5821c87b03e5Sespie if (INTEGRAL_TYPE_P (type)
5822c87b03e5Sespie && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5823c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5824c87b03e5Sespie goto associate;
5825c87b03e5Sespie
5826c87b03e5Sespie case MAX_EXPR:
5827c87b03e5Sespie if (operand_equal_p (arg0, arg1, 0))
5828c87b03e5Sespie return omit_one_operand (type, arg0, arg1);
5829c87b03e5Sespie if (INTEGRAL_TYPE_P (type)
5830c87b03e5Sespie && TYPE_MAX_VALUE (type)
5831c87b03e5Sespie && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5832c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5833c87b03e5Sespie goto associate;
5834c87b03e5Sespie
5835c87b03e5Sespie case TRUTH_NOT_EXPR:
5836c87b03e5Sespie /* Note that the operand of this must be an int
5837c87b03e5Sespie and its value must be 0 or 1.
5838c87b03e5Sespie ("true" is a fixed value perhaps depending on the language,
5839c87b03e5Sespie but we don't handle values other than 1 correctly yet.) */
5840c87b03e5Sespie tem = invert_truthvalue (arg0);
5841c87b03e5Sespie /* Avoid infinite recursion. */
5842c87b03e5Sespie if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5843c87b03e5Sespie return t;
5844c87b03e5Sespie return convert (type, tem);
5845c87b03e5Sespie
5846c87b03e5Sespie case TRUTH_ANDIF_EXPR:
5847c87b03e5Sespie /* Note that the operands of this must be ints
5848c87b03e5Sespie and their values must be 0 or 1.
5849c87b03e5Sespie ("true" is a fixed value perhaps depending on the language.) */
5850c87b03e5Sespie /* If first arg is constant zero, return it. */
5851c87b03e5Sespie if (integer_zerop (arg0))
5852c87b03e5Sespie return convert (type, arg0);
5853c87b03e5Sespie case TRUTH_AND_EXPR:
5854c87b03e5Sespie /* If either arg is constant true, drop it. */
5855c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5856c87b03e5Sespie return non_lvalue (convert (type, arg1));
5857c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5858c87b03e5Sespie /* Preserve sequence points. */
5859c87b03e5Sespie && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5860c87b03e5Sespie return non_lvalue (convert (type, arg0));
5861c87b03e5Sespie /* If second arg is constant zero, result is zero, but first arg
5862c87b03e5Sespie must be evaluated. */
5863c87b03e5Sespie if (integer_zerop (arg1))
5864c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5865c87b03e5Sespie /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5866c87b03e5Sespie case will be handled here. */
5867c87b03e5Sespie if (integer_zerop (arg0))
5868c87b03e5Sespie return omit_one_operand (type, arg0, arg1);
5869c87b03e5Sespie
5870c87b03e5Sespie truth_andor:
5871c87b03e5Sespie /* We only do these simplifications if we are optimizing. */
5872c87b03e5Sespie if (!optimize)
5873c87b03e5Sespie return t;
5874c87b03e5Sespie
5875c87b03e5Sespie /* Check for things like (A || B) && (A || C). We can convert this
5876c87b03e5Sespie to A || (B && C). Note that either operator can be any of the four
5877c87b03e5Sespie truth and/or operations and the transformation will still be
5878c87b03e5Sespie valid. Also note that we only care about order for the
5879c87b03e5Sespie ANDIF and ORIF operators. If B contains side effects, this
5880c87b03e5Sespie might change the truth-value of A. */
5881c87b03e5Sespie if (TREE_CODE (arg0) == TREE_CODE (arg1)
5882c87b03e5Sespie && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5883c87b03e5Sespie || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5884c87b03e5Sespie || TREE_CODE (arg0) == TRUTH_AND_EXPR
5885c87b03e5Sespie || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5886c87b03e5Sespie && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5887c87b03e5Sespie {
5888c87b03e5Sespie tree a00 = TREE_OPERAND (arg0, 0);
5889c87b03e5Sespie tree a01 = TREE_OPERAND (arg0, 1);
5890c87b03e5Sespie tree a10 = TREE_OPERAND (arg1, 0);
5891c87b03e5Sespie tree a11 = TREE_OPERAND (arg1, 1);
5892c87b03e5Sespie int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5893c87b03e5Sespie || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5894c87b03e5Sespie && (code == TRUTH_AND_EXPR
5895c87b03e5Sespie || code == TRUTH_OR_EXPR));
5896c87b03e5Sespie
5897c87b03e5Sespie if (operand_equal_p (a00, a10, 0))
5898c87b03e5Sespie return fold (build (TREE_CODE (arg0), type, a00,
5899c87b03e5Sespie fold (build (code, type, a01, a11))));
5900c87b03e5Sespie else if (commutative && operand_equal_p (a00, a11, 0))
5901c87b03e5Sespie return fold (build (TREE_CODE (arg0), type, a00,
5902c87b03e5Sespie fold (build (code, type, a01, a10))));
5903c87b03e5Sespie else if (commutative && operand_equal_p (a01, a10, 0))
5904c87b03e5Sespie return fold (build (TREE_CODE (arg0), type, a01,
5905c87b03e5Sespie fold (build (code, type, a00, a11))));
5906c87b03e5Sespie
5907c87b03e5Sespie /* This case is tricky because we must either have commutative
5908c87b03e5Sespie operators or else A10 must not have side-effects. */
5909c87b03e5Sespie
5910c87b03e5Sespie else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5911c87b03e5Sespie && operand_equal_p (a01, a11, 0))
5912c87b03e5Sespie return fold (build (TREE_CODE (arg0), type,
5913c87b03e5Sespie fold (build (code, type, a00, a10)),
5914c87b03e5Sespie a01));
5915c87b03e5Sespie }
5916c87b03e5Sespie
5917c87b03e5Sespie /* See if we can build a range comparison. */
5918c87b03e5Sespie if (0 != (tem = fold_range_test (t)))
5919c87b03e5Sespie return tem;
5920c87b03e5Sespie
5921c87b03e5Sespie /* Check for the possibility of merging component references. If our
5922c87b03e5Sespie lhs is another similar operation, try to merge its rhs with our
5923c87b03e5Sespie rhs. Then try to merge our lhs and rhs. */
5924c87b03e5Sespie if (TREE_CODE (arg0) == code
5925c87b03e5Sespie && 0 != (tem = fold_truthop (code, type,
5926c87b03e5Sespie TREE_OPERAND (arg0, 1), arg1)))
5927c87b03e5Sespie return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5928c87b03e5Sespie
5929c87b03e5Sespie if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5930c87b03e5Sespie return tem;
5931c87b03e5Sespie
5932c87b03e5Sespie return t;
5933c87b03e5Sespie
5934c87b03e5Sespie case TRUTH_ORIF_EXPR:
5935c87b03e5Sespie /* Note that the operands of this must be ints
5936c87b03e5Sespie and their values must be 0 or true.
5937c87b03e5Sespie ("true" is a fixed value perhaps depending on the language.) */
5938c87b03e5Sespie /* If first arg is constant true, return it. */
5939c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5940c87b03e5Sespie return convert (type, arg0);
5941c87b03e5Sespie case TRUTH_OR_EXPR:
5942c87b03e5Sespie /* If either arg is constant zero, drop it. */
5943c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5944c87b03e5Sespie return non_lvalue (convert (type, arg1));
5945c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5946c87b03e5Sespie /* Preserve sequence points. */
5947c87b03e5Sespie && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5948c87b03e5Sespie return non_lvalue (convert (type, arg0));
5949c87b03e5Sespie /* If second arg is constant true, result is true, but we must
5950c87b03e5Sespie evaluate first arg. */
5951c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5952c87b03e5Sespie return omit_one_operand (type, arg1, arg0);
5953c87b03e5Sespie /* Likewise for first arg, but note this only occurs here for
5954c87b03e5Sespie TRUTH_OR_EXPR. */
5955c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5956c87b03e5Sespie return omit_one_operand (type, arg0, arg1);
5957c87b03e5Sespie goto truth_andor;
5958c87b03e5Sespie
5959c87b03e5Sespie case TRUTH_XOR_EXPR:
5960c87b03e5Sespie /* If either arg is constant zero, drop it. */
5961c87b03e5Sespie if (integer_zerop (arg0))
5962c87b03e5Sespie return non_lvalue (convert (type, arg1));
5963c87b03e5Sespie if (integer_zerop (arg1))
5964c87b03e5Sespie return non_lvalue (convert (type, arg0));
5965c87b03e5Sespie /* If either arg is constant true, this is a logical inversion. */
5966c87b03e5Sespie if (integer_onep (arg0))
5967c87b03e5Sespie return non_lvalue (convert (type, invert_truthvalue (arg1)));
5968c87b03e5Sespie if (integer_onep (arg1))
5969c87b03e5Sespie return non_lvalue (convert (type, invert_truthvalue (arg0)));
5970c87b03e5Sespie return t;
5971c87b03e5Sespie
5972c87b03e5Sespie case EQ_EXPR:
5973c87b03e5Sespie case NE_EXPR:
5974c87b03e5Sespie case LT_EXPR:
5975c87b03e5Sespie case GT_EXPR:
5976c87b03e5Sespie case LE_EXPR:
5977c87b03e5Sespie case GE_EXPR:
5978c87b03e5Sespie /* If one arg is a real or integer constant, put it last. */
5979c87b03e5Sespie if ((TREE_CODE (arg0) == INTEGER_CST
5980c87b03e5Sespie && TREE_CODE (arg1) != INTEGER_CST)
5981c87b03e5Sespie || (TREE_CODE (arg0) == REAL_CST
5982c87b03e5Sespie && TREE_CODE (arg1) != REAL_CST))
5983c87b03e5Sespie {
5984c87b03e5Sespie TREE_OPERAND (t, 0) = arg1;
5985c87b03e5Sespie TREE_OPERAND (t, 1) = arg0;
5986c87b03e5Sespie arg0 = TREE_OPERAND (t, 0);
5987c87b03e5Sespie arg1 = TREE_OPERAND (t, 1);
5988c87b03e5Sespie code = swap_tree_comparison (code);
5989c87b03e5Sespie TREE_SET_CODE (t, code);
5990c87b03e5Sespie }
5991c87b03e5Sespie
5992c87b03e5Sespie if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5993c87b03e5Sespie {
5994c87b03e5Sespie /* (-a) CMP (-b) -> b CMP a */
5995c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR
5996c87b03e5Sespie && TREE_CODE (arg1) == NEGATE_EXPR)
5997c87b03e5Sespie return fold (build (code, type, TREE_OPERAND (arg1, 0),
5998c87b03e5Sespie TREE_OPERAND (arg0, 0)));
5999c87b03e5Sespie /* (-a) CMP CST -> a swap(CMP) (-CST) */
6000c87b03e5Sespie if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6001c87b03e5Sespie return
6002c87b03e5Sespie fold (build
6003c87b03e5Sespie (swap_tree_comparison (code), type,
6004c87b03e5Sespie TREE_OPERAND (arg0, 0),
6005c87b03e5Sespie build_real (TREE_TYPE (arg1),
6006c87b03e5Sespie REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6007c87b03e5Sespie /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6008c87b03e5Sespie /* a CMP (-0) -> a CMP 0 */
6009c87b03e5Sespie if (TREE_CODE (arg1) == REAL_CST
6010c87b03e5Sespie && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6011c87b03e5Sespie return fold (build (code, type, arg0,
6012c87b03e5Sespie build_real (TREE_TYPE (arg1), dconst0)));
6013c87b03e5Sespie
6014c87b03e5Sespie /* If this is a comparison of a real constant with a PLUS_EXPR
6015c87b03e5Sespie or a MINUS_EXPR of a real constant and unsafe_math_optimizations
6016c87b03e5Sespie are enabled, we can convert it into a comparison with a revised
6017c87b03e5Sespie real constant, as long as no overflow occurs. */
6018c87b03e5Sespie if (flag_unsafe_math_optimizations
6019c87b03e5Sespie && TREE_CODE (arg1) == REAL_CST
6020c87b03e5Sespie && (TREE_CODE (arg0) == PLUS_EXPR
6021c87b03e5Sespie || TREE_CODE (arg0) == MINUS_EXPR)
6022c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6023c87b03e5Sespie && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6024c87b03e5Sespie ? MINUS_EXPR : PLUS_EXPR,
6025c87b03e5Sespie arg1, TREE_OPERAND (arg0, 1), 0))
6026c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (tem))
6027c87b03e5Sespie return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6028c87b03e5Sespie }
6029c87b03e5Sespie
6030c87b03e5Sespie
6031*06dc6460Sespie /* Convert foo++ == CONST into ++foo == CONST + INCR. */
6032*06dc6460Sespie if (TREE_CONSTANT (arg1)
6033*06dc6460Sespie && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
6034*06dc6460Sespie || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
6035c87b03e5Sespie /* This optimization is invalid for ordered comparisons
6036c87b03e5Sespie if CONST+INCR overflows or if foo+incr might overflow.
6037c87b03e5Sespie This optimization is invalid for floating point due to rounding.
6038c87b03e5Sespie For pointer types we assume overflow doesn't happen. */
6039*06dc6460Sespie && (POINTER_TYPE_P (TREE_TYPE (arg0))
6040*06dc6460Sespie || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
6041*06dc6460Sespie && (code == EQ_EXPR || code == NE_EXPR))))
6042c87b03e5Sespie {
6043*06dc6460Sespie tree varop, newconst;
6044c87b03e5Sespie
6045*06dc6460Sespie if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
6046*06dc6460Sespie {
6047*06dc6460Sespie newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
6048*06dc6460Sespie arg1, TREE_OPERAND (arg0, 1)));
6049*06dc6460Sespie varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
6050*06dc6460Sespie TREE_OPERAND (arg0, 0),
6051*06dc6460Sespie TREE_OPERAND (arg0, 1));
6052*06dc6460Sespie }
6053*06dc6460Sespie else
6054*06dc6460Sespie {
6055*06dc6460Sespie newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
6056*06dc6460Sespie arg1, TREE_OPERAND (arg0, 1)));
6057*06dc6460Sespie varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
6058*06dc6460Sespie TREE_OPERAND (arg0, 0),
6059*06dc6460Sespie TREE_OPERAND (arg0, 1));
6060*06dc6460Sespie }
6061*06dc6460Sespie
6062c87b03e5Sespie
6063c87b03e5Sespie /* If VAROP is a reference to a bitfield, we must mask
6064c87b03e5Sespie the constant by the width of the field. */
6065c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6066*06dc6460Sespie && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
6067c87b03e5Sespie {
6068*06dc6460Sespie tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
6069*06dc6460Sespie int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
6070*06dc6460Sespie tree folded_compare, shift;
6071c87b03e5Sespie
6072c87b03e5Sespie /* First check whether the comparison would always come
6073c87b03e5Sespie out the same; if we skipped this check, the masking
6074c87b03e5Sespie below could change the meaning. */
6075c87b03e5Sespie folded_compare = fold (build (code, type,
6076c87b03e5Sespie TREE_OPERAND (varop, 0),
6077*06dc6460Sespie arg1));
6078c87b03e5Sespie if (integer_zerop (folded_compare)
6079c87b03e5Sespie || integer_onep (folded_compare))
6080c87b03e5Sespie return omit_one_operand (type, folded_compare, varop);
6081c87b03e5Sespie
6082*06dc6460Sespie shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
6083*06dc6460Sespie 0);
6084*06dc6460Sespie newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
6085*06dc6460Sespie newconst, shift));
6086*06dc6460Sespie newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
6087*06dc6460Sespie newconst, shift));
6088c87b03e5Sespie }
6089c87b03e5Sespie
6090*06dc6460Sespie return fold (build (code, type, varop, newconst));
6091c87b03e5Sespie }
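 /* Illustrative sketch only (not from the original source): for an int
    variable I, the code above rewrites
        i++ == 5    into    ++i == 5 + 1, i.e. ++i == 6,
    and when the incremented object is an N-bit bit-field the new
    constant is shifted left and back right by (precision - N) bits so
    it wraps the same way the field itself would.  */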
6092c87b03e5Sespie
6093c87b03e5Sespie /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6094c87b03e5Sespie This transformation affects the cases which are handled in later
6095c87b03e5Sespie optimizations involving comparisons with non-negative constants. */
6096c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST
6097c87b03e5Sespie && TREE_CODE (arg0) != INTEGER_CST
6098c87b03e5Sespie && tree_int_cst_sgn (arg1) > 0)
6099c87b03e5Sespie {
6100c87b03e5Sespie switch (code)
6101c87b03e5Sespie {
6102c87b03e5Sespie case GE_EXPR:
6103c87b03e5Sespie code = GT_EXPR;
6104c87b03e5Sespie arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6105c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6106c87b03e5Sespie break;
6107c87b03e5Sespie
6108c87b03e5Sespie case LT_EXPR:
6109c87b03e5Sespie code = LE_EXPR;
6110c87b03e5Sespie arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6111c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6112c87b03e5Sespie break;
6113c87b03e5Sespie
6114c87b03e5Sespie default:
6115c87b03e5Sespie break;
6116c87b03e5Sespie }
6117c87b03e5Sespie }
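 /* For example (illustration only): with C == 3, X >= 3 becomes X > 2
    and X < 3 becomes X <= 2, which lets the "highest or lowest possible
    integer" checks below fire more often.  */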
6118c87b03e5Sespie
6119c87b03e5Sespie /* Comparisons with the highest or lowest possible integer of
6120c87b03e5Sespie the specified size will have known values. */
6121c87b03e5Sespie {
6122c87b03e5Sespie int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6123c87b03e5Sespie
6124c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST
6125c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (arg1)
6126c87b03e5Sespie && width <= HOST_BITS_PER_WIDE_INT
6127c87b03e5Sespie && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6128c87b03e5Sespie || POINTER_TYPE_P (TREE_TYPE (arg1))))
6129c87b03e5Sespie {
6130c87b03e5Sespie unsigned HOST_WIDE_INT signed_max;
6131c87b03e5Sespie unsigned HOST_WIDE_INT max, min;
6132c87b03e5Sespie
6133c87b03e5Sespie signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6134c87b03e5Sespie
6135c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6136c87b03e5Sespie {
6137c87b03e5Sespie max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6138c87b03e5Sespie min = 0;
6139c87b03e5Sespie }
6140c87b03e5Sespie else
6141c87b03e5Sespie {
6142c87b03e5Sespie max = signed_max;
6143c87b03e5Sespie min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6144c87b03e5Sespie }
6145c87b03e5Sespie
6146c87b03e5Sespie if (TREE_INT_CST_HIGH (arg1) == 0
6147c87b03e5Sespie && TREE_INT_CST_LOW (arg1) == max)
6148c87b03e5Sespie switch (code)
6149c87b03e5Sespie {
6150c87b03e5Sespie case GT_EXPR:
6151c87b03e5Sespie return omit_one_operand (type,
6152c87b03e5Sespie convert (type, integer_zero_node),
6153c87b03e5Sespie arg0);
6154c87b03e5Sespie case GE_EXPR:
6155c87b03e5Sespie code = EQ_EXPR;
6156c87b03e5Sespie TREE_SET_CODE (t, EQ_EXPR);
6157c87b03e5Sespie break;
6158c87b03e5Sespie case LE_EXPR:
6159c87b03e5Sespie return omit_one_operand (type,
6160c87b03e5Sespie convert (type, integer_one_node),
6161c87b03e5Sespie arg0);
6162c87b03e5Sespie case LT_EXPR:
6163c87b03e5Sespie code = NE_EXPR;
6164c87b03e5Sespie TREE_SET_CODE (t, NE_EXPR);
6165c87b03e5Sespie break;
6166c87b03e5Sespie
6167c87b03e5Sespie /* The GE_EXPR and LT_EXPR cases above are not normally
6168c87b03e5Sespie reached because of previous transformations. */
6169c87b03e5Sespie
6170c87b03e5Sespie default:
6171c87b03e5Sespie break;
6172c87b03e5Sespie }
6173c87b03e5Sespie else if (TREE_INT_CST_HIGH (arg1) == 0
6174c87b03e5Sespie && TREE_INT_CST_LOW (arg1) == max - 1)
6175c87b03e5Sespie switch (code)
6176c87b03e5Sespie {
6177c87b03e5Sespie case GT_EXPR:
6178c87b03e5Sespie code = EQ_EXPR;
6179c87b03e5Sespie arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6180c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6181c87b03e5Sespie break;
6182c87b03e5Sespie case LE_EXPR:
6183c87b03e5Sespie code = NE_EXPR;
6184c87b03e5Sespie arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6185c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6186c87b03e5Sespie break;
6187c87b03e5Sespie default:
6188c87b03e5Sespie break;
6189c87b03e5Sespie }
6190c87b03e5Sespie else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6191c87b03e5Sespie && TREE_INT_CST_LOW (arg1) == min)
6192c87b03e5Sespie switch (code)
6193c87b03e5Sespie {
6194c87b03e5Sespie case LT_EXPR:
6195c87b03e5Sespie return omit_one_operand (type,
6196c87b03e5Sespie convert (type, integer_zero_node),
6197c87b03e5Sespie arg0);
6198c87b03e5Sespie case LE_EXPR:
6199c87b03e5Sespie code = EQ_EXPR;
6200c87b03e5Sespie TREE_SET_CODE (t, EQ_EXPR);
6201c87b03e5Sespie break;
6202c87b03e5Sespie
6203c87b03e5Sespie case GE_EXPR:
6204c87b03e5Sespie return omit_one_operand (type,
6205c87b03e5Sespie convert (type, integer_one_node),
6206c87b03e5Sespie arg0);
6207c87b03e5Sespie case GT_EXPR:
6208c87b03e5Sespie code = NE_EXPR;
6209c87b03e5Sespie TREE_SET_CODE (t, NE_EXPR);
6210c87b03e5Sespie break;
6211c87b03e5Sespie
6212c87b03e5Sespie default:
6213c87b03e5Sespie break;
6214c87b03e5Sespie }
6215c87b03e5Sespie else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6216c87b03e5Sespie && TREE_INT_CST_LOW (arg1) == min + 1)
6217c87b03e5Sespie switch (code)
6218c87b03e5Sespie {
6219c87b03e5Sespie case GE_EXPR:
6220c87b03e5Sespie code = NE_EXPR;
6221c87b03e5Sespie arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6222c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6223c87b03e5Sespie break;
6224c87b03e5Sespie case LT_EXPR:
6225c87b03e5Sespie code = EQ_EXPR;
6226c87b03e5Sespie arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6227c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1);
6228c87b03e5Sespie break;
6229c87b03e5Sespie default:
6230c87b03e5Sespie break;
6231c87b03e5Sespie }
6232c87b03e5Sespie
6233c87b03e5Sespie else if (TREE_INT_CST_HIGH (arg1) == 0
6234c87b03e5Sespie && TREE_INT_CST_LOW (arg1) == signed_max
6235c87b03e5Sespie && TREE_UNSIGNED (TREE_TYPE (arg1))
6236c87b03e5Sespie /* signed_type does not work on pointer types. */
6237c87b03e5Sespie && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6238c87b03e5Sespie {
6239c87b03e5Sespie /* The following case also applies to X < signed_max+1
6240c87b03e5Sespie and X >= signed_max+1 because of previous transformations. */
6241c87b03e5Sespie if (code == LE_EXPR || code == GT_EXPR)
6242c87b03e5Sespie {
6243c87b03e5Sespie tree st0, st1;
6244c87b03e5Sespie st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6245c87b03e5Sespie st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6246c87b03e5Sespie return fold
6247c87b03e5Sespie (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6248c87b03e5Sespie type, convert (st0, arg0),
6249c87b03e5Sespie convert (st1, integer_zero_node)));
6250c87b03e5Sespie }
6251c87b03e5Sespie }
6252c87b03e5Sespie }
6253c87b03e5Sespie }
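 /* Worked example (illustration, assuming an 8-bit unsigned type):
    max == 255, so X > 255 folds to 0 and X <= 255 folds to 1;
    X > 254 becomes X == 255 and X <= 254 becomes X != 255; and since
    signed_max == 127, X <= 127 becomes a >= 0 test on the
    corresponding signed type.  */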
6254c87b03e5Sespie
6255c87b03e5Sespie /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6256c87b03e5Sespie a MINUS_EXPR of a constant, we can convert it into a comparison with
6257c87b03e5Sespie a revised constant as long as no overflow occurs. */
6258c87b03e5Sespie if ((code == EQ_EXPR || code == NE_EXPR)
6259c87b03e5Sespie && TREE_CODE (arg1) == INTEGER_CST
6260c87b03e5Sespie && (TREE_CODE (arg0) == PLUS_EXPR
6261c87b03e5Sespie || TREE_CODE (arg0) == MINUS_EXPR)
6262c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6263c87b03e5Sespie && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6264c87b03e5Sespie ? MINUS_EXPR : PLUS_EXPR,
6265c87b03e5Sespie arg1, TREE_OPERAND (arg0, 1), 0))
6266c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (tem))
6267c87b03e5Sespie return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
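 /* Concrete instance (illustration): for EQ_EXPR, X + 3 == 7 becomes
    X == 4, using const_binop (MINUS_EXPR, 7, 3, 0) to form the revised
    constant, provided that subtraction does not overflow.  */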
6268c87b03e5Sespie
6269c87b03e5Sespie /* Similarly for a NEGATE_EXPR. */
6270c87b03e5Sespie else if ((code == EQ_EXPR || code == NE_EXPR)
6271c87b03e5Sespie && TREE_CODE (arg0) == NEGATE_EXPR
6272c87b03e5Sespie && TREE_CODE (arg1) == INTEGER_CST
6273c87b03e5Sespie && 0 != (tem = negate_expr (arg1))
6274c87b03e5Sespie && TREE_CODE (tem) == INTEGER_CST
6275c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (tem))
6276c87b03e5Sespie return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6277c87b03e5Sespie
6278c87b03e5Sespie /* If we have X - Y == 0, we can convert that to X == Y and similarly
6279c87b03e5Sespie for !=. Don't do this for ordered comparisons due to overflow. */
6280c87b03e5Sespie else if ((code == NE_EXPR || code == EQ_EXPR)
6281c87b03e5Sespie && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6282c87b03e5Sespie return fold (build (code, type,
6283c87b03e5Sespie TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6284c87b03e5Sespie
6285c87b03e5Sespie /* If we are widening one operand of an integer comparison,
6286c87b03e5Sespie see if the other operand is similarly being widened. Perhaps we
6287c87b03e5Sespie can do the comparison in the narrower type. */
6288c87b03e5Sespie else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6289c87b03e5Sespie && TREE_CODE (arg0) == NOP_EXPR
6290c87b03e5Sespie && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6291c87b03e5Sespie && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6292c87b03e5Sespie && (TREE_TYPE (t1) == TREE_TYPE (tem)
6293c87b03e5Sespie || (TREE_CODE (t1) == INTEGER_CST
6294c87b03e5Sespie && int_fits_type_p (t1, TREE_TYPE (tem)))))
6295c87b03e5Sespie return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
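 /* E.g. (illustration): if SC has type signed char, (int) SC == 10 is
    narrowed back to SC == (signed char) 10, since the constant fits in
    the unwidened type.  */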
6296c87b03e5Sespie
6297c87b03e5Sespie /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6298c87b03e5Sespie constant, we can simplify it. */
6299c87b03e5Sespie else if (TREE_CODE (arg1) == INTEGER_CST
6300c87b03e5Sespie && (TREE_CODE (arg0) == MIN_EXPR
6301c87b03e5Sespie || TREE_CODE (arg0) == MAX_EXPR)
6302c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6303c87b03e5Sespie return optimize_minmax_comparison (t);
6304c87b03e5Sespie
6305c87b03e5Sespie /* If we are comparing an ABS_EXPR with a constant, we can
6306c87b03e5Sespie convert all the cases into explicit comparisons, but they may
6307c87b03e5Sespie well not be faster than doing the ABS and one comparison.
6308c87b03e5Sespie But ABS (X) <= C is a range comparison, which becomes a subtraction
6309c87b03e5Sespie and a comparison, and is probably faster. */
6310c87b03e5Sespie else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6311c87b03e5Sespie && TREE_CODE (arg0) == ABS_EXPR
6312c87b03e5Sespie && ! TREE_SIDE_EFFECTS (arg0)
6313c87b03e5Sespie && (0 != (tem = negate_expr (arg1)))
6314c87b03e5Sespie && TREE_CODE (tem) == INTEGER_CST
6315c87b03e5Sespie && ! TREE_CONSTANT_OVERFLOW (tem))
6316c87b03e5Sespie return fold (build (TRUTH_ANDIF_EXPR, type,
6317c87b03e5Sespie build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6318c87b03e5Sespie build (LE_EXPR, type,
6319c87b03e5Sespie TREE_OPERAND (arg0, 0), arg1)));
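 /* Sketch of the range form (illustration): ABS (X) <= 5 becomes
    X >= -5 && X <= 5, built as a TRUTH_ANDIF_EXPR of a GE_EXPR and a
    LE_EXPR.  */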
6320c87b03e5Sespie
6321c87b03e5Sespie /* If this is an EQ or NE comparison with zero and ARG0 is
6322c87b03e5Sespie (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6323c87b03e5Sespie two operations, but the latter can be done in one less insn
6324c87b03e5Sespie on machines that have only two-operand insns or on which a
6325c87b03e5Sespie constant cannot be the first operand. */
6326c87b03e5Sespie if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6327c87b03e5Sespie && TREE_CODE (arg0) == BIT_AND_EXPR)
6328c87b03e5Sespie {
6329c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6330c87b03e5Sespie && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6331c87b03e5Sespie return
6332c87b03e5Sespie fold (build (code, type,
6333c87b03e5Sespie build (BIT_AND_EXPR, TREE_TYPE (arg0),
6334c87b03e5Sespie build (RSHIFT_EXPR,
6335c87b03e5Sespie TREE_TYPE (TREE_OPERAND (arg0, 0)),
6336c87b03e5Sespie TREE_OPERAND (arg0, 1),
6337c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6338c87b03e5Sespie convert (TREE_TYPE (arg0),
6339c87b03e5Sespie integer_one_node)),
6340c87b03e5Sespie arg1));
6341c87b03e5Sespie else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6342c87b03e5Sespie && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6343c87b03e5Sespie return
6344c87b03e5Sespie fold (build (code, type,
6345c87b03e5Sespie build (BIT_AND_EXPR, TREE_TYPE (arg0),
6346c87b03e5Sespie build (RSHIFT_EXPR,
6347c87b03e5Sespie TREE_TYPE (TREE_OPERAND (arg0, 1)),
6348c87b03e5Sespie TREE_OPERAND (arg0, 0),
6349c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6350c87b03e5Sespie convert (TREE_TYPE (arg0),
6351c87b03e5Sespie integer_one_node)),
6352c87b03e5Sespie arg1));
6353c87b03e5Sespie }
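 /* Illustration: ((1 << N) & FLAGS) == 0 is rewritten as
    ((FLAGS >> N) & 1) == 0, and likewise for != and for the operands
    of the BIT_AND_EXPR in either order.  */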
6354c87b03e5Sespie
6355c87b03e5Sespie /* If this is an NE or EQ comparison of zero against the result of a
6356c87b03e5Sespie signed MOD operation whose second operand is a power of 2, make
6357c87b03e5Sespie the MOD operation unsigned since it is simpler and equivalent. */
6358c87b03e5Sespie if ((code == NE_EXPR || code == EQ_EXPR)
6359c87b03e5Sespie && integer_zerop (arg1)
6360c87b03e5Sespie && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6361c87b03e5Sespie && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6362c87b03e5Sespie || TREE_CODE (arg0) == CEIL_MOD_EXPR
6363c87b03e5Sespie || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6364c87b03e5Sespie || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6365c87b03e5Sespie && integer_pow2p (TREE_OPERAND (arg0, 1)))
6366c87b03e5Sespie {
6367c87b03e5Sespie tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6368c87b03e5Sespie tree newmod = build (TREE_CODE (arg0), newtype,
6369c87b03e5Sespie convert (newtype, TREE_OPERAND (arg0, 0)),
6370c87b03e5Sespie convert (newtype, TREE_OPERAND (arg0, 1)));
6371c87b03e5Sespie
6372c87b03e5Sespie return build (code, type, newmod, convert (newtype, arg1));
6373c87b03e5Sespie }
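 /* Illustration: for signed X, X % 4 == 0 becomes
    (unsigned) X % (unsigned) 4 == 0, which is equivalent here because
    only the zero/nonzero result matters and 4 is a power of 2.  */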
6374c87b03e5Sespie
6375c87b03e5Sespie /* If this is an NE comparison of zero with an AND of one, remove the
6376c87b03e5Sespie comparison since the AND will give the correct value. */
6377c87b03e5Sespie if (code == NE_EXPR && integer_zerop (arg1)
6378c87b03e5Sespie && TREE_CODE (arg0) == BIT_AND_EXPR
6379c87b03e5Sespie && integer_onep (TREE_OPERAND (arg0, 1)))
6380c87b03e5Sespie return convert (type, arg0);
6381c87b03e5Sespie
6382c87b03e5Sespie /* If we have (A & C) == C where C is a power of 2, convert this into
6383c87b03e5Sespie (A & C) != 0. Similarly for NE_EXPR. */
6384c87b03e5Sespie if ((code == EQ_EXPR || code == NE_EXPR)
6385c87b03e5Sespie && TREE_CODE (arg0) == BIT_AND_EXPR
6386c87b03e5Sespie && integer_pow2p (TREE_OPERAND (arg0, 1))
6387c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6388c87b03e5Sespie return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6389c87b03e5Sespie arg0, integer_zero_node));
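 /* Example (illustration): with the power-of-2 constant 8,
    (A & 8) == 8 becomes (A & 8) != 0 and (A & 8) != 8 becomes
    (A & 8) == 0.  */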
6390c87b03e5Sespie
6391c87b03e5Sespie /* If we have (A & C) != 0 where C is the sign bit of A, convert
6392c87b03e5Sespie this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6393c87b03e5Sespie if ((code == EQ_EXPR || code == NE_EXPR)
6394c87b03e5Sespie && TREE_CODE (arg0) == BIT_AND_EXPR
6395c87b03e5Sespie && integer_zerop (arg1))
6396c87b03e5Sespie {
6397c87b03e5Sespie tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6398c87b03e5Sespie TREE_OPERAND (arg0, 1));
6399c87b03e5Sespie if (arg00 != NULL_TREE)
6400c87b03e5Sespie {
6401c87b03e5Sespie tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6402c87b03e5Sespie return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6403c87b03e5Sespie convert (stype, arg00),
6404c87b03e5Sespie convert (stype, integer_zero_node)));
6405c87b03e5Sespie }
6406c87b03e5Sespie }
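 /* Sketch (assuming a 32-bit type, for illustration): when C is the
    sign bit 0x80000000, (A & C) != 0 becomes (int) A < 0 and
    (A & C) == 0 becomes (int) A >= 0.  */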
6407c87b03e5Sespie
6408c87b03e5Sespie /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6409c87b03e5Sespie and similarly for >= into !=. */
6410c87b03e5Sespie if ((code == LT_EXPR || code == GE_EXPR)
6411c87b03e5Sespie && TREE_UNSIGNED (TREE_TYPE (arg0))
6412c87b03e5Sespie && TREE_CODE (arg1) == LSHIFT_EXPR
6413c87b03e5Sespie && integer_onep (TREE_OPERAND (arg1, 0)))
6414c87b03e5Sespie return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6415c87b03e5Sespie build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6416c87b03e5Sespie TREE_OPERAND (arg1, 1)),
6417c87b03e5Sespie convert (TREE_TYPE (arg0), integer_zero_node));
6418c87b03e5Sespie
6419c87b03e5Sespie else if ((code == LT_EXPR || code == GE_EXPR)
6420c87b03e5Sespie && TREE_UNSIGNED (TREE_TYPE (arg0))
6421c87b03e5Sespie && (TREE_CODE (arg1) == NOP_EXPR
6422c87b03e5Sespie || TREE_CODE (arg1) == CONVERT_EXPR)
6423c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6424c87b03e5Sespie && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6425c87b03e5Sespie return
6426c87b03e5Sespie build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6427c87b03e5Sespie convert (TREE_TYPE (arg0),
6428c87b03e5Sespie build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6429c87b03e5Sespie TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6430c87b03e5Sespie convert (TREE_TYPE (arg0), integer_zero_node));
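 /* Illustration: for unsigned X, X < (1 << Y) becomes X >> Y == 0 and
    X >= (1 << Y) becomes X >> Y != 0; the second branch handles the
    same shift hidden behind a NOP_EXPR or CONVERT_EXPR.  */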
6431c87b03e5Sespie
6432c87b03e5Sespie /* Simplify comparison of something with itself. (For IEEE
6433c87b03e5Sespie floating-point, we can only do some of these simplifications.) */
6434c87b03e5Sespie if (operand_equal_p (arg0, arg1, 0))
6435c87b03e5Sespie {
6436c87b03e5Sespie switch (code)
6437c87b03e5Sespie {
6438c87b03e5Sespie case EQ_EXPR:
6439c87b03e5Sespie case GE_EXPR:
6440c87b03e5Sespie case LE_EXPR:
6441c87b03e5Sespie if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6442c87b03e5Sespie return constant_boolean_node (1, type);
6443c87b03e5Sespie code = EQ_EXPR;
6444c87b03e5Sespie TREE_SET_CODE (t, code);
6445c87b03e5Sespie break;
6446c87b03e5Sespie
6447c87b03e5Sespie case NE_EXPR:
6448c87b03e5Sespie /* For NE, we can only do this simplification if integer. */
6449c87b03e5Sespie if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6450c87b03e5Sespie break;
6451c87b03e5Sespie /* ... fall through ... */
6452c87b03e5Sespie case GT_EXPR:
6453c87b03e5Sespie case LT_EXPR:
6454c87b03e5Sespie return constant_boolean_node (0, type);
6455c87b03e5Sespie default:
6456c87b03e5Sespie abort ();
6457c87b03e5Sespie }
6458c87b03e5Sespie }
6459c87b03e5Sespie
6460c87b03e5Sespie /* If we are comparing an expression that just has comparisons
6461c87b03e5Sespie of two integer values, arithmetic expressions of those comparisons,
6462c87b03e5Sespie and constants, we can simplify it. There are only three cases
6463c87b03e5Sespie to check: the two values can either be equal, the first can be
6464c87b03e5Sespie greater, or the second can be greater. Fold the expression for
6465c87b03e5Sespie those three values. Since each value must be 0 or 1, we have
6466c87b03e5Sespie eight possibilities, each of which corresponds to the constant 0
6467c87b03e5Sespie or 1 or one of the six possible comparisons.
6468c87b03e5Sespie
6469c87b03e5Sespie This handles common cases like (a > b) == 0 but also handles
6470c87b03e5Sespie expressions like ((x > y) - (y > x)) > 0, which supposedly
6471c87b03e5Sespie occur in macroized code. */
6472c87b03e5Sespie
6473c87b03e5Sespie if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6474c87b03e5Sespie {
6475c87b03e5Sespie tree cval1 = 0, cval2 = 0;
6476c87b03e5Sespie int save_p = 0;
6477c87b03e5Sespie
6478c87b03e5Sespie if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6479c87b03e5Sespie /* Don't handle degenerate cases here; they should already
6480c87b03e5Sespie have been handled anyway. */
6481c87b03e5Sespie && cval1 != 0 && cval2 != 0
6482c87b03e5Sespie && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6483c87b03e5Sespie && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6484c87b03e5Sespie && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6485c87b03e5Sespie && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6486c87b03e5Sespie && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6487c87b03e5Sespie && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6488c87b03e5Sespie TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6489c87b03e5Sespie {
6490c87b03e5Sespie tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6491c87b03e5Sespie tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6492c87b03e5Sespie
6493c87b03e5Sespie /* We can't just pass T to eval_subst in case cval1 or cval2
6494c87b03e5Sespie was the same as ARG1. */
6495c87b03e5Sespie
6496c87b03e5Sespie tree high_result
6497c87b03e5Sespie = fold (build (code, type,
6498c87b03e5Sespie eval_subst (arg0, cval1, maxval, cval2, minval),
6499c87b03e5Sespie arg1));
6500c87b03e5Sespie tree equal_result
6501c87b03e5Sespie = fold (build (code, type,
6502c87b03e5Sespie eval_subst (arg0, cval1, maxval, cval2, maxval),
6503c87b03e5Sespie arg1));
6504c87b03e5Sespie tree low_result
6505c87b03e5Sespie = fold (build (code, type,
6506c87b03e5Sespie eval_subst (arg0, cval1, minval, cval2, maxval),
6507c87b03e5Sespie arg1));
6508c87b03e5Sespie
6509c87b03e5Sespie /* All three of these results should be 0 or 1. Confirm they
6510c87b03e5Sespie are. Then use those values to select the proper code
6511c87b03e5Sespie to use. */
6512c87b03e5Sespie
6513c87b03e5Sespie if ((integer_zerop (high_result)
6514c87b03e5Sespie || integer_onep (high_result))
6515c87b03e5Sespie && (integer_zerop (equal_result)
6516c87b03e5Sespie || integer_onep (equal_result))
6517c87b03e5Sespie && (integer_zerop (low_result)
6518c87b03e5Sespie || integer_onep (low_result)))
6519c87b03e5Sespie {
6520c87b03e5Sespie /* Make a 3-bit mask with the high-order bit being the
6521c87b03e5Sespie value for `>', the next for '=', and the low for '<'. */
6522c87b03e5Sespie switch ((integer_onep (high_result) * 4)
6523c87b03e5Sespie + (integer_onep (equal_result) * 2)
6524c87b03e5Sespie + integer_onep (low_result))
6525c87b03e5Sespie {
6526c87b03e5Sespie case 0:
6527c87b03e5Sespie /* Always false. */
6528c87b03e5Sespie return omit_one_operand (type, integer_zero_node, arg0);
6529c87b03e5Sespie case 1:
6530c87b03e5Sespie code = LT_EXPR;
6531c87b03e5Sespie break;
6532c87b03e5Sespie case 2:
6533c87b03e5Sespie code = EQ_EXPR;
6534c87b03e5Sespie break;
6535c87b03e5Sespie case 3:
6536c87b03e5Sespie code = LE_EXPR;
6537c87b03e5Sespie break;
6538c87b03e5Sespie case 4:
6539c87b03e5Sespie code = GT_EXPR;
6540c87b03e5Sespie break;
6541c87b03e5Sespie case 5:
6542c87b03e5Sespie code = NE_EXPR;
6543c87b03e5Sespie break;
6544c87b03e5Sespie case 6:
6545c87b03e5Sespie code = GE_EXPR;
6546c87b03e5Sespie break;
6547c87b03e5Sespie case 7:
6548c87b03e5Sespie /* Always true. */
6549c87b03e5Sespie return omit_one_operand (type, integer_one_node, arg0);
6550c87b03e5Sespie }
6551c87b03e5Sespie
6552c87b03e5Sespie t = build (code, type, cval1, cval2);
6553c87b03e5Sespie if (save_p)
6554c87b03e5Sespie return save_expr (t);
6555c87b03e5Sespie else
6556c87b03e5Sespie return fold (t);
6557c87b03e5Sespie }
6558c87b03e5Sespie }
6559c87b03e5Sespie }
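 /* Worked example (illustration): for (a > b) == 0 the three folded
    results are high_result == 0, equal_result == 1 and low_result == 1,
    giving the mask 0*4 + 1*2 + 1 == 3, i.e. LE_EXPR, so the whole
    expression folds to a <= b.  */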
6560c87b03e5Sespie
6561c87b03e5Sespie /* If this is a comparison of a field, we may be able to simplify it. */
6562c87b03e5Sespie if (((TREE_CODE (arg0) == COMPONENT_REF
6563c87b03e5Sespie && (*lang_hooks.can_use_bit_fields_p) ())
6564c87b03e5Sespie || TREE_CODE (arg0) == BIT_FIELD_REF)
6565c87b03e5Sespie && (code == EQ_EXPR || code == NE_EXPR)
6566c87b03e5Sespie /* Handle the constant case even without -O
6567c87b03e5Sespie to make sure the warnings are given. */
6568c87b03e5Sespie && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6569c87b03e5Sespie {
6570c87b03e5Sespie t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6571c87b03e5Sespie return t1 ? t1 : t;
6572c87b03e5Sespie }
6573c87b03e5Sespie
6574c87b03e5Sespie /* If this is a comparison of complex values and either or both sides
6575c87b03e5Sespie are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6576c87b03e5Sespie comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6577c87b03e5Sespie This may prevent needless evaluations. */
6578c87b03e5Sespie if ((code == EQ_EXPR || code == NE_EXPR)
6579c87b03e5Sespie && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6580c87b03e5Sespie && (TREE_CODE (arg0) == COMPLEX_EXPR
6581c87b03e5Sespie || TREE_CODE (arg1) == COMPLEX_EXPR
6582c87b03e5Sespie || TREE_CODE (arg0) == COMPLEX_CST
6583c87b03e5Sespie || TREE_CODE (arg1) == COMPLEX_CST))
6584c87b03e5Sespie {
6585c87b03e5Sespie tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6586c87b03e5Sespie tree real0, imag0, real1, imag1;
6587c87b03e5Sespie
6588c87b03e5Sespie arg0 = save_expr (arg0);
6589c87b03e5Sespie arg1 = save_expr (arg1);
6590c87b03e5Sespie real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6591c87b03e5Sespie imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6592c87b03e5Sespie real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6593c87b03e5Sespie imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6594c87b03e5Sespie
6595c87b03e5Sespie return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6596c87b03e5Sespie : TRUTH_ORIF_EXPR),
6597c87b03e5Sespie type,
6598c87b03e5Sespie fold (build (code, type, real0, real1)),
6599c87b03e5Sespie fold (build (code, type, imag0, imag1))));
6600c87b03e5Sespie }
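 /* Illustration: for complex Z1 and Z2, Z1 == Z2 is split into
    REALPART (Z1) == REALPART (Z2) && IMAGPART (Z1) == IMAGPART (Z2)
    (a TRUTH_ANDIF_EXPR), while != uses TRUTH_ORIF_EXPR instead.  */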
6601c87b03e5Sespie
6602c87b03e5Sespie /* Optimize comparisons of strlen vs zero to a compare of the
6603c87b03e5Sespie first character of the string vs zero. To wit,
6604c87b03e5Sespie strlen(ptr) == 0 => *ptr == 0
6605c87b03e5Sespie strlen(ptr) != 0 => *ptr != 0
6606c87b03e5Sespie Other cases should reduce to one of these two (or a constant)
6607c87b03e5Sespie due to the return value of strlen being unsigned. */
6608c87b03e5Sespie if ((code == EQ_EXPR || code == NE_EXPR)
6609c87b03e5Sespie && integer_zerop (arg1)
6610c87b03e5Sespie && TREE_CODE (arg0) == CALL_EXPR
6611c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6612c87b03e5Sespie {
6613c87b03e5Sespie tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6614c87b03e5Sespie tree arglist;
6615c87b03e5Sespie
6616c87b03e5Sespie if (TREE_CODE (fndecl) == FUNCTION_DECL
6617c87b03e5Sespie && DECL_BUILT_IN (fndecl)
6618c87b03e5Sespie && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6619c87b03e5Sespie && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6620c87b03e5Sespie && (arglist = TREE_OPERAND (arg0, 1))
6621c87b03e5Sespie && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6622c87b03e5Sespie && ! TREE_CHAIN (arglist))
6623c87b03e5Sespie return fold (build (code, type,
6624c87b03e5Sespie build1 (INDIRECT_REF, char_type_node,
6625c87b03e5Sespie TREE_VALUE (arglist)),
6626c87b03e5Sespie integer_zero_node));
6627c87b03e5Sespie }
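 /* E.g. (illustration): strlen (p) == 0 becomes *p == 0, i.e. a direct
    test of the first character against the terminating NUL.  */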
6628c87b03e5Sespie
6629c87b03e5Sespie /* From here on, the only cases we handle are when the result is
6630c87b03e5Sespie known to be a constant.
6631c87b03e5Sespie
6632c87b03e5Sespie To compute GT, swap the arguments and do LT.
6633c87b03e5Sespie To compute GE, do LT and invert the result.
6634c87b03e5Sespie To compute LE, swap the arguments, do LT and invert the result.
6635c87b03e5Sespie To compute NE, do EQ and invert the result.
6636c87b03e5Sespie
6637c87b03e5Sespie Therefore, the code below must handle only EQ and LT. */
6638c87b03e5Sespie
6639c87b03e5Sespie if (code == LE_EXPR || code == GT_EXPR)
6640c87b03e5Sespie {
6641c87b03e5Sespie tem = arg0, arg0 = arg1, arg1 = tem;
6642c87b03e5Sespie code = swap_tree_comparison (code);
6643c87b03e5Sespie }
6644c87b03e5Sespie
6645c87b03e5Sespie /* Note that it is safe to invert for real values here because we
6646c87b03e5Sespie will check below in the one case that it matters. */
6647c87b03e5Sespie
6648c87b03e5Sespie t1 = NULL_TREE;
6649c87b03e5Sespie invert = 0;
6650c87b03e5Sespie if (code == NE_EXPR || code == GE_EXPR)
6651c87b03e5Sespie {
6652c87b03e5Sespie invert = 1;
6653c87b03e5Sespie code = invert_tree_comparison (code);
6654c87b03e5Sespie }
6655c87b03e5Sespie
6656c87b03e5Sespie /* Compute a result for LT or EQ if args permit;
6657c87b03e5Sespie otherwise return T. */
6658c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6659c87b03e5Sespie {
6660c87b03e5Sespie if (code == EQ_EXPR)
6661c87b03e5Sespie t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6662c87b03e5Sespie else
6663c87b03e5Sespie t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6664c87b03e5Sespie ? INT_CST_LT_UNSIGNED (arg0, arg1)
6665c87b03e5Sespie : INT_CST_LT (arg0, arg1)),
6666c87b03e5Sespie 0);
6667c87b03e5Sespie }
6668c87b03e5Sespie
6669c87b03e5Sespie #if 0 /* This is no longer useful, but breaks some real code. */
6670c87b03e5Sespie /* Assume a nonexplicit constant cannot equal an explicit one,
6671c87b03e5Sespie since such code would be undefined anyway.
6672c87b03e5Sespie Exception: on sysvr4, using #pragma weak,
6673c87b03e5Sespie a label can come out as 0. */
6674c87b03e5Sespie else if (TREE_CODE (arg1) == INTEGER_CST
6675c87b03e5Sespie && !integer_zerop (arg1)
6676c87b03e5Sespie && TREE_CONSTANT (arg0)
6677c87b03e5Sespie && TREE_CODE (arg0) == ADDR_EXPR
6678c87b03e5Sespie && code == EQ_EXPR)
6679c87b03e5Sespie t1 = build_int_2 (0, 0);
6680c87b03e5Sespie #endif
6681c87b03e5Sespie /* Two real constants can be compared explicitly. */
6682c87b03e5Sespie else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6683c87b03e5Sespie {
6684c87b03e5Sespie /* If either operand is a NaN, the result is false with two
6685c87b03e5Sespie exceptions: First, an NE_EXPR is true on NaNs, but that case
6686c87b03e5Sespie is already handled correctly since we will be inverting the
6687c87b03e5Sespie result for NE_EXPR. Second, if we had inverted a LE_EXPR
6688c87b03e5Sespie or a GE_EXPR into a LT_EXPR, we must return true so that it
6689c87b03e5Sespie will be inverted into false. */
6690c87b03e5Sespie
6691c87b03e5Sespie if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6692c87b03e5Sespie || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6693c87b03e5Sespie t1 = build_int_2 (invert && code == LT_EXPR, 0);
6694c87b03e5Sespie
6695c87b03e5Sespie else if (code == EQ_EXPR)
6696c87b03e5Sespie t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6697c87b03e5Sespie TREE_REAL_CST (arg1)),
6698c87b03e5Sespie 0);
6699c87b03e5Sespie else
6700c87b03e5Sespie t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6701c87b03e5Sespie TREE_REAL_CST (arg1)),
6702c87b03e5Sespie 0);
6703c87b03e5Sespie }
6704c87b03e5Sespie
6705c87b03e5Sespie if (t1 == NULL_TREE)
6706c87b03e5Sespie return t;
6707c87b03e5Sespie
6708c87b03e5Sespie if (invert)
6709c87b03e5Sespie TREE_INT_CST_LOW (t1) ^= 1;
6710c87b03e5Sespie
6711c87b03e5Sespie TREE_TYPE (t1) = type;
6712c87b03e5Sespie if (TREE_CODE (type) == BOOLEAN_TYPE)
6713c87b03e5Sespie return (*lang_hooks.truthvalue_conversion) (t1);
6714c87b03e5Sespie return t1;
6715c87b03e5Sespie
6716c87b03e5Sespie case COND_EXPR:
6717c87b03e5Sespie /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6718c87b03e5Sespie so all simple results must be passed through pedantic_non_lvalue. */
6719c87b03e5Sespie if (TREE_CODE (arg0) == INTEGER_CST)
6720*06dc6460Sespie {
6721*06dc6460Sespie tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
6722*06dc6460Sespie /* Only optimize constant conditions when the selected branch
6723*06dc6460Sespie has the same type as the COND_EXPR. This avoids optimizing
6724*06dc6460Sespie away "c ? x : throw", where the throw has a void type. */
6725*06dc6460Sespie if (! VOID_TYPE_P (TREE_TYPE (tem))
6726*06dc6460Sespie || VOID_TYPE_P (TREE_TYPE (t)))
6727*06dc6460Sespie return pedantic_non_lvalue (tem);
6728*06dc6460Sespie return t;
6729*06dc6460Sespie }
6730*06dc6460Sespie if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6731c87b03e5Sespie return pedantic_omit_one_operand (type, arg1, arg0);
6732c87b03e5Sespie
6733c87b03e5Sespie /* If the second operand is zero, invert the comparison and swap
6734c87b03e5Sespie the second and third operands. Likewise if the second operand
6735c87b03e5Sespie is constant and the third is not or if the third operand is
6736c87b03e5Sespie equivalent to the first operand of the comparison. */
6737c87b03e5Sespie
6738c87b03e5Sespie if (integer_zerop (arg1)
6739c87b03e5Sespie || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6740c87b03e5Sespie || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6741c87b03e5Sespie && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6742c87b03e5Sespie TREE_OPERAND (t, 2),
6743c87b03e5Sespie TREE_OPERAND (arg0, 1))))
6744c87b03e5Sespie {
6745c87b03e5Sespie /* See if this can be inverted. If it can't, possibly because
6746c87b03e5Sespie it was a floating-point inequality comparison, don't do
6747c87b03e5Sespie anything. */
6748c87b03e5Sespie tem = invert_truthvalue (arg0);
6749c87b03e5Sespie
6750c87b03e5Sespie if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6751c87b03e5Sespie {
6752c87b03e5Sespie t = build (code, type, tem,
6753c87b03e5Sespie TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6754c87b03e5Sespie arg0 = tem;
6755c87b03e5Sespie /* arg1 should be the first argument of the new T. */
6756c87b03e5Sespie arg1 = TREE_OPERAND (t, 1);
6757c87b03e5Sespie STRIP_NOPS (arg1);
6758c87b03e5Sespie }
6759c87b03e5Sespie }
6760c87b03e5Sespie
6761c87b03e5Sespie /* If we have A op B ? A : C, we may be able to convert this to a
6762c87b03e5Sespie simpler expression, depending on the operation and the values
6763c87b03e5Sespie of B and C. Signed zeros prevent all of these transformations,
6764c87b03e5Sespie for reasons given above each one. */
6765c87b03e5Sespie
6766c87b03e5Sespie if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6767c87b03e5Sespie && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6768c87b03e5Sespie arg1, TREE_OPERAND (arg0, 1))
6769c87b03e5Sespie && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6770c87b03e5Sespie {
6771c87b03e5Sespie tree arg2 = TREE_OPERAND (t, 2);
6772c87b03e5Sespie enum tree_code comp_code = TREE_CODE (arg0);
6773c87b03e5Sespie
6774c87b03e5Sespie STRIP_NOPS (arg2);
6775c87b03e5Sespie
6776c87b03e5Sespie /* If we have A op 0 ? A : -A, consider applying the following
6777c87b03e5Sespie transformations:
6778c87b03e5Sespie
6779c87b03e5Sespie A == 0? A : -A same as -A
6780c87b03e5Sespie A != 0? A : -A same as A
6781c87b03e5Sespie A >= 0? A : -A same as abs (A)
6782c87b03e5Sespie A > 0? A : -A same as abs (A)
6783c87b03e5Sespie A <= 0? A : -A same as -abs (A)
6784c87b03e5Sespie A < 0? A : -A same as -abs (A)
6785c87b03e5Sespie
6786c87b03e5Sespie None of these transformations work for modes with signed
6787c87b03e5Sespie zeros. If A is +/-0, the first two transformations will
6788c87b03e5Sespie change the sign of the result (from +0 to -0, or vice
6789c87b03e5Sespie versa). The last four will fix the sign of the result,
6790c87b03e5Sespie even though the original expressions could be positive or
6791c87b03e5Sespie negative, depending on the sign of A.
6792c87b03e5Sespie
6793c87b03e5Sespie Note that all these transformations are correct if A is
6794c87b03e5Sespie NaN, since the two alternatives (A and -A) are also NaNs. */
6795c87b03e5Sespie if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6796c87b03e5Sespie ? real_zerop (TREE_OPERAND (arg0, 1))
6797c87b03e5Sespie : integer_zerop (TREE_OPERAND (arg0, 1)))
6798c87b03e5Sespie && TREE_CODE (arg2) == NEGATE_EXPR
6799c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6800c87b03e5Sespie switch (comp_code)
6801c87b03e5Sespie {
6802c87b03e5Sespie case EQ_EXPR:
6803c87b03e5Sespie return
6804c87b03e5Sespie pedantic_non_lvalue
6805c87b03e5Sespie (convert (type,
6806c87b03e5Sespie negate_expr
6807c87b03e5Sespie (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6808c87b03e5Sespie arg1))));
6809c87b03e5Sespie case NE_EXPR:
6810c87b03e5Sespie return pedantic_non_lvalue (convert (type, arg1));
6811c87b03e5Sespie case GE_EXPR:
6812c87b03e5Sespie case GT_EXPR:
6813c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6814c87b03e5Sespie arg1 = convert ((*lang_hooks.types.signed_type)
6815c87b03e5Sespie (TREE_TYPE (arg1)), arg1);
6816c87b03e5Sespie return pedantic_non_lvalue
6817c87b03e5Sespie (convert (type, fold (build1 (ABS_EXPR,
6818c87b03e5Sespie TREE_TYPE (arg1), arg1))));
6819c87b03e5Sespie case LE_EXPR:
6820c87b03e5Sespie case LT_EXPR:
6821c87b03e5Sespie if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6822c87b03e5Sespie arg1 = convert ((*lang_hooks.types.signed_type)
6823c87b03e5Sespie (TREE_TYPE (arg1)), arg1);
6824c87b03e5Sespie return pedantic_non_lvalue
6825c87b03e5Sespie (negate_expr (convert (type,
6826c87b03e5Sespie fold (build1 (ABS_EXPR,
6827c87b03e5Sespie TREE_TYPE (arg1),
6828c87b03e5Sespie arg1)))));
6829c87b03e5Sespie default:
6830c87b03e5Sespie abort ();
6831c87b03e5Sespie }
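 /* Illustration of the table above (signed zeros not honored):
    A > 0 ? A : -A and A >= 0 ? A : -A fold to ABS_EXPR (A), while
    A < 0 ? A : -A and A <= 0 ? A : -A fold to the negated ABS_EXPR.  */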
6832c87b03e5Sespie
6833c87b03e5Sespie /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6834c87b03e5Sespie A == 0 ? A : 0 is always 0 unless A is -0. Note that
6835c87b03e5Sespie both transformations are correct when A is NaN: A != 0
6836c87b03e5Sespie is then true, and A == 0 is false. */
6837c87b03e5Sespie
6838c87b03e5Sespie if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6839c87b03e5Sespie {
6840c87b03e5Sespie if (comp_code == NE_EXPR)
6841c87b03e5Sespie return pedantic_non_lvalue (convert (type, arg1));
6842c87b03e5Sespie else if (comp_code == EQ_EXPR)
6843c87b03e5Sespie return pedantic_non_lvalue (convert (type, integer_zero_node));
6844c87b03e5Sespie }
6845c87b03e5Sespie
6846c87b03e5Sespie /* Try some transformations of A op B ? A : B.
6847c87b03e5Sespie
6848c87b03e5Sespie A == B? A : B same as B
6849c87b03e5Sespie A != B? A : B same as A
6850c87b03e5Sespie A >= B? A : B same as max (A, B)
6851c87b03e5Sespie A > B? A : B same as max (B, A)
6852c87b03e5Sespie A <= B? A : B same as min (A, B)
6853c87b03e5Sespie A < B? A : B same as min (B, A)
6854c87b03e5Sespie
6855c87b03e5Sespie As above, these transformations don't work in the presence
6856c87b03e5Sespie of signed zeros. For example, if A and B are zeros of
6857c87b03e5Sespie opposite sign, the first two transformations will change
6858c87b03e5Sespie the sign of the result. In the last four, the original
6859c87b03e5Sespie expressions give different results for (A=+0, B=-0) and
6860c87b03e5Sespie (A=-0, B=+0), but the transformed expressions do not.
6861c87b03e5Sespie
6862c87b03e5Sespie The first two transformations are correct if either A or B
6863c87b03e5Sespie is a NaN. In the first transformation, the condition will
6864c87b03e5Sespie be false, and B will indeed be chosen. In the case of the
6865c87b03e5Sespie second transformation, the condition A != B will be true,
6866c87b03e5Sespie and A will be chosen.
6867c87b03e5Sespie
6868c87b03e5Sespie The conversions to max() and min() are not correct if B is
6869c87b03e5Sespie a number and A is not. The conditions in the original
6870c87b03e5Sespie expressions will be false, so all four give B. The min()
6871c87b03e5Sespie and max() versions would give a NaN instead. */
6872c87b03e5Sespie if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6873c87b03e5Sespie arg2, TREE_OPERAND (arg0, 0)))
6874c87b03e5Sespie {
6875c87b03e5Sespie tree comp_op0 = TREE_OPERAND (arg0, 0);
6876c87b03e5Sespie tree comp_op1 = TREE_OPERAND (arg0, 1);
6877c87b03e5Sespie tree comp_type = TREE_TYPE (comp_op0);
6878c87b03e5Sespie
6879c87b03e5Sespie /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6880c87b03e5Sespie if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6881c87b03e5Sespie comp_type = type;
6882c87b03e5Sespie
6883c87b03e5Sespie switch (comp_code)
6884c87b03e5Sespie {
6885c87b03e5Sespie case EQ_EXPR:
6886c87b03e5Sespie return pedantic_non_lvalue (convert (type, arg2));
6887c87b03e5Sespie case NE_EXPR:
6888c87b03e5Sespie return pedantic_non_lvalue (convert (type, arg1));
6889c87b03e5Sespie case LE_EXPR:
6890c87b03e5Sespie case LT_EXPR:
6891c87b03e5Sespie /* In C++ a ?: expression can be an lvalue, so put the
6892c87b03e5Sespie operand which will be used if they are equal first
6893c87b03e5Sespie so that we can convert this back to the
6894c87b03e5Sespie corresponding COND_EXPR. */
6895c87b03e5Sespie if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6896c87b03e5Sespie return pedantic_non_lvalue
6897c87b03e5Sespie (convert (type, fold (build (MIN_EXPR, comp_type,
6898c87b03e5Sespie (comp_code == LE_EXPR
6899c87b03e5Sespie ? comp_op0 : comp_op1),
6900c87b03e5Sespie (comp_code == LE_EXPR
6901c87b03e5Sespie ? comp_op1 : comp_op0)))));
6902c87b03e5Sespie break;
6903c87b03e5Sespie case GE_EXPR:
6904c87b03e5Sespie case GT_EXPR:
6905c87b03e5Sespie if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6906c87b03e5Sespie return pedantic_non_lvalue
6907c87b03e5Sespie (convert (type, fold (build (MAX_EXPR, comp_type,
6908c87b03e5Sespie (comp_code == GE_EXPR
6909c87b03e5Sespie ? comp_op0 : comp_op1),
6910c87b03e5Sespie (comp_code == GE_EXPR
6911c87b03e5Sespie ? comp_op1 : comp_op0)))));
6912c87b03e5Sespie break;
6913c87b03e5Sespie default:
6914c87b03e5Sespie abort ();
6915c87b03e5Sespie }
6916c87b03e5Sespie }
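 /* Illustration (NaNs not honored): A <= B ? A : B folds to
    MIN_EXPR (A, B) and A > B ? A : B folds to MAX_EXPR (B, A), keeping
    the operand selected on equality first so the COND_EXPR can be
    recovered for C++ lvalues.  */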
6917c87b03e5Sespie
6918c87b03e5Sespie /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6919c87b03e5Sespie we might still be able to simplify this. For example,
6920c87b03e5Sespie if C1 is one less or one more than C2, this might have started
6921c87b03e5Sespie out as a MIN or MAX and been transformed by this function.
6922c87b03e5Sespie Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6923c87b03e5Sespie
6924c87b03e5Sespie if (INTEGRAL_TYPE_P (type)
6925c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6926c87b03e5Sespie && TREE_CODE (arg2) == INTEGER_CST)
6927c87b03e5Sespie switch (comp_code)
6928c87b03e5Sespie {
6929c87b03e5Sespie case EQ_EXPR:
6930c87b03e5Sespie /* We can replace A with C1 in this case. */
6931c87b03e5Sespie arg1 = convert (type, TREE_OPERAND (arg0, 1));
6932c87b03e5Sespie t = build (code, type, TREE_OPERAND (t, 0), arg1,
6933c87b03e5Sespie TREE_OPERAND (t, 2));
6934c87b03e5Sespie break;
6935c87b03e5Sespie
6936c87b03e5Sespie case LT_EXPR:
6937c87b03e5Sespie /* If C1 is C2 + 1, this is min(A, C2). */
6938c87b03e5Sespie if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6939c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
6940c87b03e5Sespie const_binop (PLUS_EXPR, arg2,
6941c87b03e5Sespie integer_one_node, 0), 1))
6942c87b03e5Sespie return pedantic_non_lvalue
6943c87b03e5Sespie (fold (build (MIN_EXPR, type, arg1, arg2)));
6944c87b03e5Sespie break;
6945c87b03e5Sespie
6946c87b03e5Sespie case LE_EXPR:
6947c87b03e5Sespie /* If C1 is C2 - 1, this is min(A, C2). */
6948c87b03e5Sespie if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6949c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
6950c87b03e5Sespie const_binop (MINUS_EXPR, arg2,
6951c87b03e5Sespie integer_one_node, 0), 1))
6952c87b03e5Sespie return pedantic_non_lvalue
6953c87b03e5Sespie (fold (build (MIN_EXPR, type, arg1, arg2)));
6954c87b03e5Sespie break;
6955c87b03e5Sespie
6956c87b03e5Sespie case GT_EXPR:
6957c87b03e5Sespie /* If C1 is C2 - 1, this is max(A, C2). */
6958c87b03e5Sespie if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6959c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
6960c87b03e5Sespie const_binop (MINUS_EXPR, arg2,
6961c87b03e5Sespie integer_one_node, 0), 1))
6962c87b03e5Sespie return pedantic_non_lvalue
6963c87b03e5Sespie (fold (build (MAX_EXPR, type, arg1, arg2)));
6964c87b03e5Sespie break;
6965c87b03e5Sespie
6966c87b03e5Sespie case GE_EXPR:
6967c87b03e5Sespie /* If C1 is C2 + 1, this is max(A, C2). */
6968c87b03e5Sespie if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6969c87b03e5Sespie && operand_equal_p (TREE_OPERAND (arg0, 1),
6970c87b03e5Sespie const_binop (PLUS_EXPR, arg2,
6971c87b03e5Sespie integer_one_node, 0), 1))
6972c87b03e5Sespie return pedantic_non_lvalue
6973c87b03e5Sespie (fold (build (MAX_EXPR, type, arg1, arg2)));
6974c87b03e5Sespie break;
6975c87b03e5Sespie case NE_EXPR:
6976c87b03e5Sespie break;
6977c87b03e5Sespie default:
6978c87b03e5Sespie abort ();
6979c87b03e5Sespie }
6980c87b03e5Sespie }
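 /* Example (illustration): X < 3 ? X : 2 has C1 == C2 + 1, so the
    LT_EXPR case above rebuilds it as MIN_EXPR (X, 2); likewise
    X > 2 ? X : 3 becomes MAX_EXPR (X, 3).  */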
6981c87b03e5Sespie
6982c87b03e5Sespie /* If the second operand is simpler than the third, swap them
6983c87b03e5Sespie since that produces better jump optimization results. */
6984c87b03e5Sespie if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6985c87b03e5Sespie || TREE_CODE (arg1) == SAVE_EXPR)
6986c87b03e5Sespie && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6987c87b03e5Sespie || DECL_P (TREE_OPERAND (t, 2))
6988c87b03e5Sespie || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6989c87b03e5Sespie {
6990c87b03e5Sespie /* See if this can be inverted. If it can't, possibly because
6991c87b03e5Sespie it was a floating-point inequality comparison, don't do
6992c87b03e5Sespie anything. */
6993c87b03e5Sespie tem = invert_truthvalue (arg0);
6994c87b03e5Sespie
6995c87b03e5Sespie if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6996c87b03e5Sespie {
6997c87b03e5Sespie t = build (code, type, tem,
6998c87b03e5Sespie TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6999c87b03e5Sespie arg0 = tem;
7000c87b03e5Sespie /* arg1 should be the first argument of the new T. */
7001c87b03e5Sespie arg1 = TREE_OPERAND (t, 1);
7002c87b03e5Sespie STRIP_NOPS (arg1);
7003c87b03e5Sespie }
7004c87b03e5Sespie }
7005c87b03e5Sespie
7006c87b03e5Sespie /* Convert A ? 1 : 0 to simply A. */
7007c87b03e5Sespie if (integer_onep (TREE_OPERAND (t, 1))
7008c87b03e5Sespie && integer_zerop (TREE_OPERAND (t, 2))
7009c87b03e5Sespie /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7010c87b03e5Sespie call to fold will try to move the conversion inside
7011c87b03e5Sespie a COND, which will recurse. In that case, the COND_EXPR
7012c87b03e5Sespie is probably the best choice, so leave it alone. */
7013c87b03e5Sespie && type == TREE_TYPE (arg0))
7014c87b03e5Sespie return pedantic_non_lvalue (arg0);
7015c87b03e5Sespie
7016c87b03e5Sespie /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7017c87b03e5Sespie over COND_EXPR in cases such as floating point comparisons. */
7018c87b03e5Sespie if (integer_zerop (TREE_OPERAND (t, 1))
7019c87b03e5Sespie && integer_onep (TREE_OPERAND (t, 2))
7020c87b03e5Sespie && truth_value_p (TREE_CODE (arg0)))
7021c87b03e5Sespie return pedantic_non_lvalue (convert (type,
7022c87b03e5Sespie invert_truthvalue (arg0)));
7023c87b03e5Sespie
7024c87b03e5Sespie /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7025c87b03e5Sespie operation is simply A & 2. */
7026c87b03e5Sespie
7027c87b03e5Sespie if (integer_zerop (TREE_OPERAND (t, 2))
7028c87b03e5Sespie && TREE_CODE (arg0) == NE_EXPR
7029c87b03e5Sespie && integer_zerop (TREE_OPERAND (arg0, 1))
7030c87b03e5Sespie && integer_pow2p (arg1)
7031c87b03e5Sespie && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7032c87b03e5Sespie && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7033c87b03e5Sespie arg1, 1))
7034c87b03e5Sespie return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
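 /* Illustration: (A & 2) != 0 ? 2 : 0 matches this pattern, since the
    selected constant 2 is the same power of two as the BIT_AND_EXPR
    mask, so the whole conditional folds to A & 2.  */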
7035c87b03e5Sespie
7036c87b03e5Sespie /* Convert A ? B : 0 into A && B if A and B are truth values. */
7037c87b03e5Sespie if (integer_zerop (TREE_OPERAND (t, 2))
7038c87b03e5Sespie && truth_value_p (TREE_CODE (arg0))
7039c87b03e5Sespie && truth_value_p (TREE_CODE (arg1)))
7040c87b03e5Sespie return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7041c87b03e5Sespie arg0, arg1)));
7042c87b03e5Sespie
7043c87b03e5Sespie /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7044c87b03e5Sespie if (integer_onep (TREE_OPERAND (t, 2))
7045c87b03e5Sespie && truth_value_p (TREE_CODE (arg0))
7046c87b03e5Sespie && truth_value_p (TREE_CODE (arg1)))
7047c87b03e5Sespie {
7048c87b03e5Sespie /* Only perform transformation if ARG0 is easily inverted. */
7049c87b03e5Sespie tem = invert_truthvalue (arg0);
7050c87b03e5Sespie if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7051c87b03e5Sespie return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7052c87b03e5Sespie tem, arg1)));
7053c87b03e5Sespie }
7054c87b03e5Sespie
7055c87b03e5Sespie return t;
7056c87b03e5Sespie
7057c87b03e5Sespie case COMPOUND_EXPR:
7058c87b03e5Sespie /* When pedantic, a compound expression can be neither an lvalue
7059c87b03e5Sespie nor an integer constant expression. */
7060c87b03e5Sespie if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7061c87b03e5Sespie return t;
7062c87b03e5Sespie /* Don't let (0, 0) be a null pointer constant. */
7063c87b03e5Sespie if (integer_zerop (arg1))
7064c87b03e5Sespie return build1 (NOP_EXPR, type, arg1);
7065c87b03e5Sespie return convert (type, arg1);
7066c87b03e5Sespie
7067c87b03e5Sespie case COMPLEX_EXPR:
7068c87b03e5Sespie if (wins)
7069c87b03e5Sespie return build_complex (type, arg0, arg1);
7070c87b03e5Sespie return t;
7071c87b03e5Sespie
7072c87b03e5Sespie case REALPART_EXPR:
7073c87b03e5Sespie if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7074c87b03e5Sespie return t;
7075c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7076c87b03e5Sespie return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7077c87b03e5Sespie TREE_OPERAND (arg0, 1));
7078c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_CST)
7079c87b03e5Sespie return TREE_REALPART (arg0);
7080c87b03e5Sespie else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7081c87b03e5Sespie return fold (build (TREE_CODE (arg0), type,
7082c87b03e5Sespie fold (build1 (REALPART_EXPR, type,
7083c87b03e5Sespie TREE_OPERAND (arg0, 0))),
7084c87b03e5Sespie fold (build1 (REALPART_EXPR,
7085c87b03e5Sespie type, TREE_OPERAND (arg0, 1)))));
7086c87b03e5Sespie return t;
7087c87b03e5Sespie
7088c87b03e5Sespie case IMAGPART_EXPR:
7089c87b03e5Sespie if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7090c87b03e5Sespie return convert (type, integer_zero_node);
7091c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7092c87b03e5Sespie return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7093c87b03e5Sespie TREE_OPERAND (arg0, 0));
7094c87b03e5Sespie else if (TREE_CODE (arg0) == COMPLEX_CST)
7095c87b03e5Sespie return TREE_IMAGPART (arg0);
7096c87b03e5Sespie else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7097c87b03e5Sespie return fold (build (TREE_CODE (arg0), type,
7098c87b03e5Sespie fold (build1 (IMAGPART_EXPR, type,
7099c87b03e5Sespie TREE_OPERAND (arg0, 0))),
7100c87b03e5Sespie fold (build1 (IMAGPART_EXPR, type,
7101c87b03e5Sespie TREE_OPERAND (arg0, 1)))));
7102c87b03e5Sespie return t;
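      /* For illustration: the imaginary part of a non-complex value is
	 simply 0, IMAGPART_EXPR <COMPLEX_EXPR <a, b>> folds to "b"
	 (keeping "a" only for its side effects), and as with
	 REALPART_EXPR the operation distributes over PLUS_EXPR and
	 MINUS_EXPR.  */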
7103c87b03e5Sespie
7104c87b03e5Sespie /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7105c87b03e5Sespie appropriate. */
7106c87b03e5Sespie case CLEANUP_POINT_EXPR:
7107c87b03e5Sespie if (! has_cleanups (arg0))
7108c87b03e5Sespie return TREE_OPERAND (t, 0);
7109c87b03e5Sespie
7110c87b03e5Sespie {
7111c87b03e5Sespie enum tree_code code0 = TREE_CODE (arg0);
7112c87b03e5Sespie int kind0 = TREE_CODE_CLASS (code0);
7113c87b03e5Sespie tree arg00 = TREE_OPERAND (arg0, 0);
7114c87b03e5Sespie tree arg01;
7115c87b03e5Sespie
7116c87b03e5Sespie if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7117c87b03e5Sespie return fold (build1 (code0, type,
7118c87b03e5Sespie fold (build1 (CLEANUP_POINT_EXPR,
7119c87b03e5Sespie TREE_TYPE (arg00), arg00))));
7120c87b03e5Sespie
7121c87b03e5Sespie if (kind0 == '<' || kind0 == '2'
7122c87b03e5Sespie || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7123c87b03e5Sespie || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7124c87b03e5Sespie || code0 == TRUTH_XOR_EXPR)
7125c87b03e5Sespie {
7126c87b03e5Sespie arg01 = TREE_OPERAND (arg0, 1);
7127c87b03e5Sespie
7128c87b03e5Sespie if (TREE_CONSTANT (arg00)
7129c87b03e5Sespie || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7130c87b03e5Sespie && ! has_cleanups (arg00)))
7131c87b03e5Sespie return fold (build (code0, type, arg00,
7132c87b03e5Sespie fold (build1 (CLEANUP_POINT_EXPR,
7133c87b03e5Sespie TREE_TYPE (arg01), arg01))));
7134c87b03e5Sespie
7135c87b03e5Sespie if (TREE_CONSTANT (arg01))
7136c87b03e5Sespie return fold (build (code0, type,
7137c87b03e5Sespie fold (build1 (CLEANUP_POINT_EXPR,
7138c87b03e5Sespie TREE_TYPE (arg00), arg00)),
7139c87b03e5Sespie arg01));
7140c87b03e5Sespie }
7141c87b03e5Sespie
7142c87b03e5Sespie return t;
7143c87b03e5Sespie }
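      /* For illustration of the rewrites above: with a constant second
	 operand, CLEANUP_POINT_EXPR <a + 4> becomes
	 CLEANUP_POINT_EXPR <a> + 4, exposing the arithmetic to the usual
	 folds while the cleanup point still covers the operand that may
	 need cleanups.  */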
7144c87b03e5Sespie
7145c87b03e5Sespie case CALL_EXPR:
7146c87b03e5Sespie /* Check for a built-in function. */
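      /* The callee must be an ADDR_EXPR of a FUNCTION_DECL that is
	 marked built-in; fold_builtin then gets a chance to simplify
	 the whole call, e.g. replacing a call whose arguments are
	 constants by its constant result where it knows how.  */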
7147c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7148c87b03e5Sespie && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7149c87b03e5Sespie == FUNCTION_DECL)
7150c87b03e5Sespie && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7151c87b03e5Sespie {
7152c87b03e5Sespie tree tmp = fold_builtin (expr);
7153c87b03e5Sespie if (tmp)
7154c87b03e5Sespie return tmp;
7155c87b03e5Sespie }
7156c87b03e5Sespie return t;
7157c87b03e5Sespie
7158c87b03e5Sespie default:
7159c87b03e5Sespie return t;
7160c87b03e5Sespie } /* switch (code) */
7161c87b03e5Sespie }
7162c87b03e5Sespie
7163c87b03e5Sespie /* Determine whether the first argument is a multiple of the second argument.
7164c87b03e5Sespie Return 0 if it is not, or if we cannot easily determine that it is.
7165c87b03e5Sespie
7166c87b03e5Sespie An example of the sort of thing we care about (at this point; this routine
7167c87b03e5Sespie could surely be made more general, and expanded to do what the *_DIV_EXPR's
7168c87b03e5Sespie fold cases do now) is discovering that
7169c87b03e5Sespie
7170c87b03e5Sespie SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7171c87b03e5Sespie
7172c87b03e5Sespie is a multiple of
7173c87b03e5Sespie
7174c87b03e5Sespie SAVE_EXPR (J * 8)
7175c87b03e5Sespie
7176c87b03e5Sespie when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7177c87b03e5Sespie
7178c87b03e5Sespie This code also handles discovering that
7179c87b03e5Sespie
7180c87b03e5Sespie SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7181c87b03e5Sespie
7182c87b03e5Sespie is a multiple of 8 so we don't have to worry about dealing with a
7183c87b03e5Sespie possible remainder.
7184c87b03e5Sespie
7185c87b03e5Sespie Note that we *look* inside a SAVE_EXPR only to determine how it was
7186c87b03e5Sespie calculated; it is not safe for fold to do much of anything else with the
7187c87b03e5Sespie internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7188c87b03e5Sespie at run time. For example, the latter example above *cannot* be implemented
7189c87b03e5Sespie as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7190c87b03e5Sespie evaluation time of the original SAVE_EXPR is not necessarily the same at
7191c87b03e5Sespie the time the new expression is evaluated. The only optimization of this
7192c87b03e5Sespie sort that would be valid is changing
7193c87b03e5Sespie
7194c87b03e5Sespie SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7195c87b03e5Sespie
7196c87b03e5Sespie divided by 8 to
7197c87b03e5Sespie
7198c87b03e5Sespie SAVE_EXPR (I) * SAVE_EXPR (J)
7199c87b03e5Sespie
7200c87b03e5Sespie (where the same SAVE_EXPR (J) is used in the original and the
7201c87b03e5Sespie transformed version). */
7202c87b03e5Sespie
7203c87b03e5Sespie static int
7204c87b03e5Sespie multiple_of_p (type, top, bottom)
7205c87b03e5Sespie tree type;
7206c87b03e5Sespie tree top;
7207c87b03e5Sespie tree bottom;
7208c87b03e5Sespie {
7209c87b03e5Sespie if (operand_equal_p (top, bottom, 0))
7210c87b03e5Sespie return 1;
7211c87b03e5Sespie
7212c87b03e5Sespie if (TREE_CODE (type) != INTEGER_TYPE)
7213c87b03e5Sespie return 0;
7214c87b03e5Sespie
7215c87b03e5Sespie switch (TREE_CODE (top))
7216c87b03e5Sespie {
7217c87b03e5Sespie case MULT_EXPR:
7218c87b03e5Sespie return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7219c87b03e5Sespie || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7220c87b03e5Sespie
7221c87b03e5Sespie case PLUS_EXPR:
7222c87b03e5Sespie case MINUS_EXPR:
7223c87b03e5Sespie return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7224c87b03e5Sespie && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7225c87b03e5Sespie
7226c87b03e5Sespie case LSHIFT_EXPR:
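	/* TOP is X << N, which equals X * (1 << N), so TOP is a multiple
	   of BOTTOM whenever 1 << N is.  Form that power of two
	   (rejecting shift counts too large for sizetype) and recurse
	   on it.  */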
7227c87b03e5Sespie if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7228c87b03e5Sespie {
7229c87b03e5Sespie tree op1, t1;
7230c87b03e5Sespie
7231c87b03e5Sespie op1 = TREE_OPERAND (top, 1);
7232c87b03e5Sespie /* const_binop may not detect overflow correctly,
7233c87b03e5Sespie so check for it explicitly here. */
7234c87b03e5Sespie if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7235c87b03e5Sespie > TREE_INT_CST_LOW (op1)
7236c87b03e5Sespie && TREE_INT_CST_HIGH (op1) == 0
7237c87b03e5Sespie && 0 != (t1 = convert (type,
7238c87b03e5Sespie const_binop (LSHIFT_EXPR, size_one_node,
7239c87b03e5Sespie op1, 0)))
7240c87b03e5Sespie && ! TREE_OVERFLOW (t1))
7241c87b03e5Sespie return multiple_of_p (type, t1, bottom);
7242c87b03e5Sespie }
7243c87b03e5Sespie return 0;
7244c87b03e5Sespie
7245c87b03e5Sespie case NOP_EXPR:
7246c87b03e5Sespie /* Can't handle conversions from a non-integral or wider integral type. */
7247c87b03e5Sespie if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7248c87b03e5Sespie || (TYPE_PRECISION (type)
7249c87b03e5Sespie < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7250c87b03e5Sespie return 0;
7251c87b03e5Sespie
7252c87b03e5Sespie /* .. fall through ... */
7253c87b03e5Sespie
7254c87b03e5Sespie case SAVE_EXPR:
7255c87b03e5Sespie return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7256c87b03e5Sespie
7257c87b03e5Sespie case INTEGER_CST:
7258c87b03e5Sespie if (TREE_CODE (bottom) != INTEGER_CST
7259c87b03e5Sespie || (TREE_UNSIGNED (type)
7260c87b03e5Sespie && (tree_int_cst_sgn (top) < 0
7261c87b03e5Sespie || tree_int_cst_sgn (bottom) < 0)))
7262c87b03e5Sespie return 0;
7263c87b03e5Sespie return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7264c87b03e5Sespie top, bottom, 0));
7265c87b03e5Sespie
7266c87b03e5Sespie default:
7267c87b03e5Sespie return 0;
7268c87b03e5Sespie }
7269c87b03e5Sespie }
7270c87b03e5Sespie
7271c87b03e5Sespie /* Return true if `t' is known to be non-negative. */
7272c87b03e5Sespie
7273c87b03e5Sespie int
7274c87b03e5Sespie tree_expr_nonnegative_p (t)
7275c87b03e5Sespie tree t;
7276c87b03e5Sespie {
7277c87b03e5Sespie switch (TREE_CODE (t))
7278c87b03e5Sespie {
7279c87b03e5Sespie case ABS_EXPR:
7280c87b03e5Sespie case FFS_EXPR:
7281c87b03e5Sespie return 1;
7282c87b03e5Sespie case INTEGER_CST:
7283c87b03e5Sespie return tree_int_cst_sgn (t) >= 0;
7284c87b03e5Sespie case TRUNC_DIV_EXPR:
7285c87b03e5Sespie case CEIL_DIV_EXPR:
7286c87b03e5Sespie case FLOOR_DIV_EXPR:
7287c87b03e5Sespie case ROUND_DIV_EXPR:
7288c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7289c87b03e5Sespie && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7290c87b03e5Sespie case TRUNC_MOD_EXPR:
7291c87b03e5Sespie case CEIL_MOD_EXPR:
7292c87b03e5Sespie case FLOOR_MOD_EXPR:
7293c87b03e5Sespie case ROUND_MOD_EXPR:
7294c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7295c87b03e5Sespie case COND_EXPR:
7296c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7297c87b03e5Sespie && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7298c87b03e5Sespie case COMPOUND_EXPR:
7299c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7300c87b03e5Sespie case MIN_EXPR:
7301c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7302c87b03e5Sespie && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7303c87b03e5Sespie case MAX_EXPR:
7304c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7305c87b03e5Sespie || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7306c87b03e5Sespie case MODIFY_EXPR:
7307c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7308c87b03e5Sespie case BIND_EXPR:
7309c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7310c87b03e5Sespie case SAVE_EXPR:
7311c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7312c87b03e5Sespie case NON_LVALUE_EXPR:
7313c87b03e5Sespie return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7314c87b03e5Sespie case RTL_EXPR:
7315c87b03e5Sespie return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7316c87b03e5Sespie
7317c87b03e5Sespie default:
7318c87b03e5Sespie if (truth_value_p (TREE_CODE (t)))
7319c87b03e5Sespie /* Truth values evaluate to 0 or 1, which is nonnegative. */
7320c87b03e5Sespie return 1;
7321c87b03e5Sespie else
7322c87b03e5Sespie /* We don't know the sign of `t', so be conservative and return false. */
7323c87b03e5Sespie return 0;
7324c87b03e5Sespie }
7325c87b03e5Sespie }
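/* For illustration of the rules above: a tree such as
   TRUNC_MOD_EXPR <ABS_EXPR <i>, 16> is recognized as non-negative,
   because ABS_EXPR is always non-negative and a TRUNC_MOD_EXPR is
   non-negative whenever its first operand is.  */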
7326c87b03e5Sespie
7327c87b03e5Sespie /* Return true if `r' is known to be non-negative.
7328c87b03e5Sespie Only handles constants at the moment. */
7329c87b03e5Sespie
7330c87b03e5Sespie int
7331c87b03e5Sespie rtl_expr_nonnegative_p (r)
7332c87b03e5Sespie rtx r;
7333c87b03e5Sespie {
7334c87b03e5Sespie switch (GET_CODE (r))
7335c87b03e5Sespie {
7336c87b03e5Sespie case CONST_INT:
7337c87b03e5Sespie return INTVAL (r) >= 0;
7338c87b03e5Sespie
7339c87b03e5Sespie case CONST_DOUBLE:
7340c87b03e5Sespie if (GET_MODE (r) == VOIDmode)
7341c87b03e5Sespie return CONST_DOUBLE_HIGH (r) >= 0;
7342c87b03e5Sespie return 0;
7343c87b03e5Sespie
7344c87b03e5Sespie case CONST_VECTOR:
7345c87b03e5Sespie {
7346c87b03e5Sespie int units, i;
7347c87b03e5Sespie rtx elt;
7348c87b03e5Sespie
7349c87b03e5Sespie units = CONST_VECTOR_NUNITS (r);
7350c87b03e5Sespie
7351c87b03e5Sespie for (i = 0; i < units; ++i)
7352c87b03e5Sespie {
7353c87b03e5Sespie elt = CONST_VECTOR_ELT (r, i);
7354c87b03e5Sespie if (!rtl_expr_nonnegative_p (elt))
7355c87b03e5Sespie return 0;
7356c87b03e5Sespie }
7357c87b03e5Sespie
7358c87b03e5Sespie return 1;
7359c87b03e5Sespie }
7360c87b03e5Sespie
7361c87b03e5Sespie case SYMBOL_REF:
7362c87b03e5Sespie case LABEL_REF:
7363c87b03e5Sespie /* These are always nonnegative. */
7364c87b03e5Sespie return 1;
7365c87b03e5Sespie
7366c87b03e5Sespie default:
7367c87b03e5Sespie return 0;
7368c87b03e5Sespie }
7369c87b03e5Sespie }
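/* For illustration: (const_int 42) and (symbol_ref "x") are reported as
   non-negative, while an ordinary (reg ...) is not, since nothing is
   known about the value it holds.  */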
7370c87b03e5Sespie
7371c87b03e5Sespie #include "gt-fold-const.h"
7372