138fd1498Szrj /* Fold a constant sub-tree into a single node for C-compiler
238fd1498Szrj Copyright (C) 1987-2018 Free Software Foundation, Inc.
338fd1498Szrj
438fd1498Szrj This file is part of GCC.
538fd1498Szrj
638fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
738fd1498Szrj the terms of the GNU General Public License as published by the Free
838fd1498Szrj Software Foundation; either version 3, or (at your option) any later
938fd1498Szrj version.
1038fd1498Szrj
1138fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1238fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
1338fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
1438fd1498Szrj for more details.
1538fd1498Szrj
1638fd1498Szrj You should have received a copy of the GNU General Public License
1738fd1498Szrj along with GCC; see the file COPYING3. If not see
1838fd1498Szrj <http://www.gnu.org/licenses/>. */
1938fd1498Szrj
2038fd1498Szrj /*@@ This file should be rewritten to use an arbitrary precision
2138fd1498Szrj @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
2238fd1498Szrj @@ Perhaps the routines could also be used for bc/dc, and made a lib.
2338fd1498Szrj @@ The routines that translate from the ap rep should
2438fd1498Szrj @@ warn if precision et. al. is lost.
2538fd1498Szrj @@ This would also make life easier when this technology is used
2638fd1498Szrj @@ for cross-compilers. */
2738fd1498Szrj
2838fd1498Szrj /* The entry points in this file are fold, size_int_wide and size_binop.
2938fd1498Szrj
3038fd1498Szrj fold takes a tree as argument and returns a simplified tree.
3138fd1498Szrj
3238fd1498Szrj size_binop takes a tree code for an arithmetic operation
3338fd1498Szrj and two operands that are trees, and produces a tree for the
3438fd1498Szrj result, assuming the type comes from `sizetype'.
3538fd1498Szrj
3638fd1498Szrj size_int takes an integer value, and creates a tree constant
3738fd1498Szrj with type from `sizetype'.
3838fd1498Szrj
3938fd1498Szrj Note: Since the folders get called on non-gimple code as well as
4038fd1498Szrj gimple code, we need to handle GIMPLE tuples as well as their
4138fd1498Szrj corresponding tree equivalents. */
4238fd1498Szrj
4338fd1498Szrj #include "config.h"
4438fd1498Szrj #include "system.h"
4538fd1498Szrj #include "coretypes.h"
4638fd1498Szrj #include "backend.h"
4738fd1498Szrj #include "target.h"
4838fd1498Szrj #include "rtl.h"
4938fd1498Szrj #include "tree.h"
5038fd1498Szrj #include "gimple.h"
5138fd1498Szrj #include "predict.h"
5238fd1498Szrj #include "memmodel.h"
5338fd1498Szrj #include "tm_p.h"
5438fd1498Szrj #include "tree-ssa-operands.h"
5538fd1498Szrj #include "optabs-query.h"
5638fd1498Szrj #include "cgraph.h"
5738fd1498Szrj #include "diagnostic-core.h"
5838fd1498Szrj #include "flags.h"
5938fd1498Szrj #include "alias.h"
6038fd1498Szrj #include "fold-const.h"
6138fd1498Szrj #include "fold-const-call.h"
6238fd1498Szrj #include "stor-layout.h"
6338fd1498Szrj #include "calls.h"
6438fd1498Szrj #include "tree-iterator.h"
6538fd1498Szrj #include "expr.h"
6638fd1498Szrj #include "intl.h"
6738fd1498Szrj #include "langhooks.h"
6838fd1498Szrj #include "tree-eh.h"
6938fd1498Szrj #include "gimplify.h"
7038fd1498Szrj #include "tree-dfa.h"
7138fd1498Szrj #include "builtins.h"
7238fd1498Szrj #include "generic-match.h"
7338fd1498Szrj #include "gimple-fold.h"
7438fd1498Szrj #include "params.h"
7538fd1498Szrj #include "tree-into-ssa.h"
7638fd1498Szrj #include "md5.h"
7738fd1498Szrj #include "case-cfn-macros.h"
7838fd1498Szrj #include "stringpool.h"
7938fd1498Szrj #include "tree-vrp.h"
8038fd1498Szrj #include "tree-ssanames.h"
8138fd1498Szrj #include "selftest.h"
8238fd1498Szrj #include "stringpool.h"
8338fd1498Szrj #include "attribs.h"
8438fd1498Szrj #include "tree-vector-builder.h"
8538fd1498Szrj #include "vec-perm-indices.h"
8638fd1498Szrj
8738fd1498Szrj /* Nonzero if we are folding constants inside an initializer; zero
8838fd1498Szrj otherwise. */
8938fd1498Szrj int folding_initializer = 0;
9038fd1498Szrj
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding uses one bit per primitive outcome:
     bit 0 (1) = "less than", bit 1 (2) = "equal",
     bit 2 (4) = "greater than", bit 3 (8) = "unordered".
   A code is the OR of the outcomes for which the comparison is true,
   so e.g. LE = LT|EQ = 3 and the union/intersection of two
   comparisons is the bitwise OR/AND of their codes.  */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* Never true.  */
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,	/* LT | EQ.  */
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,	/* LT | GT: ordered and not equal.  */
  COMPCODE_GE = 6,	/* GT | EQ.  */
  COMPCODE_ORD = 7,	/* LT | EQ | GT: any ordered outcome.  */
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,	/* UNORD | LT.  */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ.  */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ.  */
  COMPCODE_UNGT = 12,	/* UNORD | GT.  */
  COMPCODE_NE = 13,	/* UNORD | LT | GT.  */
  COMPCODE_UNGE = 14,	/* UNORD | GT | EQ.  */
  COMPCODE_TRUE = 15	/* Always true.  */
};
11238fd1498Szrj
11338fd1498Szrj static bool negate_expr_p (tree);
11438fd1498Szrj static tree negate_expr (tree);
11538fd1498Szrj static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
11638fd1498Szrj static enum comparison_code comparison_to_compcode (enum tree_code);
11738fd1498Szrj static enum tree_code compcode_to_comparison (enum comparison_code);
11838fd1498Szrj static int twoval_comparison_p (tree, tree *, tree *);
11938fd1498Szrj static tree eval_subst (location_t, tree, tree, tree, tree, tree);
12038fd1498Szrj static tree optimize_bit_field_compare (location_t, enum tree_code,
12138fd1498Szrj tree, tree, tree);
12238fd1498Szrj static int simple_operand_p (const_tree);
12338fd1498Szrj static bool simple_operand_p_2 (tree);
12438fd1498Szrj static tree range_binop (enum tree_code, tree, tree, int, tree, int);
12538fd1498Szrj static tree range_predecessor (tree);
12638fd1498Szrj static tree range_successor (tree);
12738fd1498Szrj static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
12838fd1498Szrj static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
12938fd1498Szrj static tree unextend (tree, int, int, tree);
13038fd1498Szrj static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
13138fd1498Szrj static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
13238fd1498Szrj static tree fold_binary_op_with_conditional_arg (location_t,
13338fd1498Szrj enum tree_code, tree,
13438fd1498Szrj tree, tree,
13538fd1498Szrj tree, tree, int);
13638fd1498Szrj static tree fold_negate_const (tree, tree);
13738fd1498Szrj static tree fold_not_const (const_tree, tree);
13838fd1498Szrj static tree fold_relational_const (enum tree_code, tree, tree, tree);
13938fd1498Szrj static tree fold_convert_const (enum tree_code, tree, tree);
14038fd1498Szrj static tree fold_view_convert_expr (tree, tree);
14138fd1498Szrj static tree fold_negate_expr (location_t, tree);
14238fd1498Szrj
14338fd1498Szrj
14438fd1498Szrj /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
14538fd1498Szrj Otherwise, return LOC. */
14638fd1498Szrj
14738fd1498Szrj static location_t
expr_location_or(tree t,location_t loc)14838fd1498Szrj expr_location_or (tree t, location_t loc)
14938fd1498Szrj {
15038fd1498Szrj location_t tloc = EXPR_LOCATION (t);
15138fd1498Szrj return tloc == UNKNOWN_LOCATION ? loc : tloc;
15238fd1498Szrj }
15338fd1498Szrj
15438fd1498Szrj /* Similar to protected_set_expr_location, but never modify x in place,
15538fd1498Szrj if location can and needs to be set, unshare it. */
15638fd1498Szrj
15738fd1498Szrj static inline tree
protected_set_expr_location_unshare(tree x,location_t loc)15838fd1498Szrj protected_set_expr_location_unshare (tree x, location_t loc)
15938fd1498Szrj {
16038fd1498Szrj if (CAN_HAVE_LOCATION_P (x)
16138fd1498Szrj && EXPR_LOCATION (x) != loc
16238fd1498Szrj && !(TREE_CODE (x) == SAVE_EXPR
16338fd1498Szrj || TREE_CODE (x) == TARGET_EXPR
16438fd1498Szrj || TREE_CODE (x) == BIND_EXPR))
16538fd1498Szrj {
16638fd1498Szrj x = copy_node (x);
16738fd1498Szrj SET_EXPR_LOCATION (x, loc);
16838fd1498Szrj }
16938fd1498Szrj return x;
17038fd1498Szrj }
17138fd1498Szrj
17238fd1498Szrj /* If ARG2 divides ARG1 with zero remainder, carries out the exact
17338fd1498Szrj division and returns the quotient. Otherwise returns
17438fd1498Szrj NULL_TREE. */
17538fd1498Szrj
17638fd1498Szrj tree
div_if_zero_remainder(const_tree arg1,const_tree arg2)17738fd1498Szrj div_if_zero_remainder (const_tree arg1, const_tree arg2)
17838fd1498Szrj {
17938fd1498Szrj widest_int quo;
18038fd1498Szrj
18138fd1498Szrj if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
18238fd1498Szrj SIGNED, &quo))
18338fd1498Szrj return wide_int_to_tree (TREE_TYPE (arg1), quo);
18438fd1498Szrj
18538fd1498Szrj return NULL_TREE;
18638fd1498Szrj }
18738fd1498Szrj
18838fd1498Szrj /* This is nonzero if we should defer warnings about undefined
18938fd1498Szrj overflow. This facility exists because these warnings are a
19038fd1498Szrj special case. The code to estimate loop iterations does not want
19138fd1498Szrj to issue any warnings, since it works with expressions which do not
19238fd1498Szrj occur in user code. Various bits of cleanup code call fold(), but
19338fd1498Szrj only use the result if it has certain characteristics (e.g., is a
19438fd1498Szrj constant); that code only wants to issue a warning if the result is
19538fd1498Szrj used. */
19638fd1498Szrj
19738fd1498Szrj static int fold_deferring_overflow_warnings;
19838fd1498Szrj
19938fd1498Szrj /* If a warning about undefined overflow is deferred, this is the
20038fd1498Szrj warning. Note that this may cause us to turn two warnings into
20138fd1498Szrj one, but that is fine since it is sufficient to only give one
20238fd1498Szrj warning per expression. */
20338fd1498Szrj
20438fd1498Szrj static const char* fold_deferred_overflow_warning;
20538fd1498Szrj
20638fd1498Szrj /* If a warning about undefined overflow is deferred, this is the
20738fd1498Szrj level at which the warning should be emitted. */
20838fd1498Szrj
20938fd1498Szrj static enum warn_strict_overflow_code fold_deferred_overflow_code;
21038fd1498Szrj
21138fd1498Szrj /* Start deferring overflow warnings. We could use a stack here to
21238fd1498Szrj permit nested calls, but at present it is not necessary. */
21338fd1498Szrj
21438fd1498Szrj void
fold_defer_overflow_warnings(void)21538fd1498Szrj fold_defer_overflow_warnings (void)
21638fd1498Szrj {
21738fd1498Szrj ++fold_deferring_overflow_warnings;
21838fd1498Szrj }
21938fd1498Szrj
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  /* Must balance a prior fold_defer_overflow_warnings call.  */
  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      /* Still inside an outer deferral: don't emit anything yet, just
	 lower the deferred severity to CODE if CODE is stricter
	 (numerically smaller) than what is currently recorded.  */
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  /* Outermost undefer: consume the pending warning so the deferred
     state is clear regardless of whether we emit it below.  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Respect the no-warning flag on the associated statement.  */
  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  /* Attach the warning to STMT's location, or to the current input
     location if no statement was supplied.  */
  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
26938fd1498Szrj
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  /* Delegate with ISSUE == false so any pending warning is dropped.  */
  fold_undefer_overflow_warnings (false, NULL, 0);
}
27838fd1498Szrj
27938fd1498Szrj /* Whether we are deferring overflow warnings. */
28038fd1498Szrj
28138fd1498Szrj bool
fold_deferring_overflow_warnings_p(void)28238fd1498Szrj fold_deferring_overflow_warnings_p (void)
28338fd1498Szrj {
28438fd1498Szrj return fold_deferring_overflow_warnings > 0;
28538fd1498Szrj }
28638fd1498Szrj
28738fd1498Szrj /* This is called when we fold something based on the fact that signed
28838fd1498Szrj overflow is undefined. */
28938fd1498Szrj
29038fd1498Szrj void
fold_overflow_warning(const char * gmsgid,enum warn_strict_overflow_code wc)29138fd1498Szrj fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
29238fd1498Szrj {
29338fd1498Szrj if (fold_deferring_overflow_warnings > 0)
29438fd1498Szrj {
29538fd1498Szrj if (fold_deferred_overflow_warning == NULL
29638fd1498Szrj || wc < fold_deferred_overflow_code)
29738fd1498Szrj {
29838fd1498Szrj fold_deferred_overflow_warning = gmsgid;
29938fd1498Szrj fold_deferred_overflow_code = wc;
30038fd1498Szrj }
30138fd1498Szrj }
30238fd1498Szrj else if (issue_strict_overflow_warning (wc))
30338fd1498Szrj warning (OPT_Wstrict_overflow, gmsgid);
30438fd1498Szrj }
30538fd1498Szrj
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    /* These functions are unconditionally odd.  */
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    /* The rounding-to-integer functions are only treated as odd when
       the rounding mode cannot be changed at run time
       (!flag_rounding_math).  */
    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
35038fd1498Szrj
35138fd1498Szrj /* Check whether we may negate an integer constant T without causing
35238fd1498Szrj overflow. */
35338fd1498Szrj
35438fd1498Szrj bool
may_negate_without_overflow_p(const_tree t)35538fd1498Szrj may_negate_without_overflow_p (const_tree t)
35638fd1498Szrj {
35738fd1498Szrj tree type;
35838fd1498Szrj
35938fd1498Szrj gcc_assert (TREE_CODE (t) == INTEGER_CST);
36038fd1498Szrj
36138fd1498Szrj type = TREE_TYPE (t);
36238fd1498Szrj if (TYPE_UNSIGNED (type))
36338fd1498Szrj return false;
36438fd1498Szrj
36538fd1498Szrj return !wi::only_sign_bit_p (wi::to_wide (t));
36638fd1498Szrj }
36738fd1498Szrj
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.
   The cases here must stay in sync with fold_negate_expr_1, which
   performs the actual transformations.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation always wraps, so it is always safe.  */
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -(~A) is folded to A + 1, which may overflow; only safe when
	 the type wraps.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      /* -(-A) drops a negation the sanitizer may want to observe.  */
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      /* Rewriting -(A + B) as a subtraction is invalid with
	 sign-dependent rounding, signed zeros, or trapping/undefined
	 integer overflow.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
         if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* -(A * B) / -(A / B) can push the negation into either
	 operand, unless sign-dependent rounding matters.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x), valid only for odd math functions.  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
52338fd1498Szrj
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  LOC is the location to use for any trees built here.
   The cases must stay in sync with negate_expr_p above.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Accept the negated constant unless it newly overflowed and
	 the signed-overflow sanitizer should see the operation.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	/* Negate a complex constant by negating both parts; give up
	   if either part cannot be negated.  */
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	/* Negate every encoded element; give up if any element
	   cannot be negated.  */
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* -(-A) -> A, unless the sanitizer must observe the inner
	 negation's overflow.  */
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand accepts it.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x), valid only for odd math functions.  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      /* Flip the signedness so the shift extracts the sign bit
		 as 0/1 instead of 0/-1, then convert back.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
72438fd1498Szrj
72538fd1498Szrj /* A wrapper for fold_negate_expr_1. */
72638fd1498Szrj
72738fd1498Szrj static tree
fold_negate_expr(location_t loc,tree t)72838fd1498Szrj fold_negate_expr (location_t loc, tree t)
72938fd1498Szrj {
73038fd1498Szrj tree type = TREE_TYPE (t);
73138fd1498Szrj STRIP_SIGN_NOPS (t);
73238fd1498Szrj tree tem = fold_negate_expr_1 (loc, t);
73338fd1498Szrj if (tem == NULL_TREE)
73438fd1498Szrj return NULL_TREE;
73538fd1498Szrj return fold_convert_loc (loc, type, tem);
73638fd1498Szrj }
73738fd1498Szrj
73838fd1498Szrj /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
73938fd1498Szrj negated in a simpler way. Also allow for T to be NULL_TREE, in which case
74038fd1498Szrj return NULL_TREE. */
74138fd1498Szrj
74238fd1498Szrj static tree
negate_expr(tree t)74338fd1498Szrj negate_expr (tree t)
74438fd1498Szrj {
74538fd1498Szrj tree type, tem;
74638fd1498Szrj location_t loc;
74738fd1498Szrj
74838fd1498Szrj if (t == NULL_TREE)
74938fd1498Szrj return NULL_TREE;
75038fd1498Szrj
75138fd1498Szrj loc = EXPR_LOCATION (t);
75238fd1498Szrj type = TREE_TYPE (t);
75338fd1498Szrj STRIP_SIGN_NOPS (t);
75438fd1498Szrj
75538fd1498Szrj tem = fold_negate_expr (loc, t);
75638fd1498Szrj if (!tem)
75738fd1498Szrj tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
75838fd1498Szrj return fold_convert_loc (loc, type, tem);
75938fd1498Szrj }
76038fd1498Szrj
76138fd1498Szrj /* Split a tree IN into a constant, literal and variable parts that could be
76238fd1498Szrj combined with CODE to make IN. "constant" means an expression with
76338fd1498Szrj TREE_CONSTANT but that isn't an actual constant. CODE must be a
76438fd1498Szrj commutative arithmetic operation. Store the constant part into *CONP,
76538fd1498Szrj the literal in *LITP and return the variable part. If a part isn't
76638fd1498Szrj present, set it to null. If the tree does not decompose in this way,
76738fd1498Szrj return the entire tree as the variable part and the other parts as null.
76838fd1498Szrj
76938fd1498Szrj If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
77038fd1498Szrj case, we negate an operand that was subtracted. Except if it is a
77138fd1498Szrj literal for which we use *MINUS_LITP instead.
77238fd1498Szrj
77338fd1498Szrj If NEGATE_P is true, we are negating all of IN, again except a literal
77438fd1498Szrj for which we use *MINUS_LITP instead. If a variable part is of pointer
77538fd1498Szrj type, it is negated after converting to TYPE. This prevents us from
77638fd1498Szrj generating illegal MINUS pointer expression. LOC is the location of
77738fd1498Szrj the converted variable part.
77838fd1498Szrj
77938fd1498Szrj If IN is itself a literal or constant, return it as appropriate.
78038fd1498Szrj
78138fd1498Szrj Note that we do not guarantee that any of the three values will be the
78238fd1498Szrj same type as IN, but they will have the same signedness and mode. */
78338fd1498Szrj
static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  /* Start with all output parts empty; exactly the parts found below
     are filled in.  */
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* For MINUS_EXPR, whichever part comes from operand 1 was
	 effectively negated.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated part moves to its
	 MINUS_* output slot.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      /* Negate all of IN by swapping each part with its MINUS_*
	 counterpart.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  /* Drop overflow flags on the literal parts so they don't inhibit
     later folding.  */
  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
88838fd1498Szrj
88938fd1498Szrj /* Re-associate trees split by the above function. T1 and T2 are
89038fd1498Szrj either expressions to associate or null. Return the new
89138fd1498Szrj expression, if any. LOC is the location of the new expression. If
89238fd1498Szrj we build an operation, do it in TYPE and with CODE. */
89338fd1498Szrj
89438fd1498Szrj static tree
associate_trees(location_t loc,tree t1,tree t2,enum tree_code code,tree type)89538fd1498Szrj associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
89638fd1498Szrj {
89738fd1498Szrj if (t1 == 0)
89838fd1498Szrj {
89938fd1498Szrj gcc_assert (t2 == 0 || code != MINUS_EXPR);
90038fd1498Szrj return t2;
90138fd1498Szrj }
90238fd1498Szrj else if (t2 == 0)
90338fd1498Szrj return t1;
90438fd1498Szrj
90538fd1498Szrj /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
90638fd1498Szrj try to fold this since we will have infinite recursion. But do
90738fd1498Szrj deal with any NEGATE_EXPRs. */
90838fd1498Szrj if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
90938fd1498Szrj || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
91038fd1498Szrj || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
91138fd1498Szrj {
91238fd1498Szrj if (code == PLUS_EXPR)
91338fd1498Szrj {
91438fd1498Szrj if (TREE_CODE (t1) == NEGATE_EXPR)
91538fd1498Szrj return build2_loc (loc, MINUS_EXPR, type,
91638fd1498Szrj fold_convert_loc (loc, type, t2),
91738fd1498Szrj fold_convert_loc (loc, type,
91838fd1498Szrj TREE_OPERAND (t1, 0)));
91938fd1498Szrj else if (TREE_CODE (t2) == NEGATE_EXPR)
92038fd1498Szrj return build2_loc (loc, MINUS_EXPR, type,
92138fd1498Szrj fold_convert_loc (loc, type, t1),
92238fd1498Szrj fold_convert_loc (loc, type,
92338fd1498Szrj TREE_OPERAND (t2, 0)));
92438fd1498Szrj else if (integer_zerop (t2))
92538fd1498Szrj return fold_convert_loc (loc, type, t1);
92638fd1498Szrj }
92738fd1498Szrj else if (code == MINUS_EXPR)
92838fd1498Szrj {
92938fd1498Szrj if (integer_zerop (t2))
93038fd1498Szrj return fold_convert_loc (loc, type, t1);
93138fd1498Szrj }
93238fd1498Szrj
93338fd1498Szrj return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
93438fd1498Szrj fold_convert_loc (loc, type, t2));
93538fd1498Szrj }
93638fd1498Szrj
93738fd1498Szrj return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
93838fd1498Szrj fold_convert_loc (loc, type, t2));
93938fd1498Szrj }
94038fd1498Szrj
94138fd1498Szrj /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
94238fd1498Szrj for use in int_const_binop, size_binop and size_diffop. */
94338fd1498Szrj
94438fd1498Szrj static bool
int_binop_types_match_p(enum tree_code code,const_tree type1,const_tree type2)94538fd1498Szrj int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
94638fd1498Szrj {
94738fd1498Szrj if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
94838fd1498Szrj return false;
94938fd1498Szrj if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
95038fd1498Szrj return false;
95138fd1498Szrj
95238fd1498Szrj switch (code)
95338fd1498Szrj {
95438fd1498Szrj case LSHIFT_EXPR:
95538fd1498Szrj case RSHIFT_EXPR:
95638fd1498Szrj case LROTATE_EXPR:
95738fd1498Szrj case RROTATE_EXPR:
95838fd1498Szrj return true;
95938fd1498Szrj
96038fd1498Szrj default:
96138fd1498Szrj break;
96238fd1498Szrj }
96338fd1498Szrj
96438fd1498Szrj return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
96538fd1498Szrj && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
96638fd1498Szrj && TYPE_MODE (type1) == TYPE_MODE (type2);
96738fd1498Szrj }
96838fd1498Szrj
96938fd1498Szrj /* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
97038fd1498Szrj
static tree
int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  /* The result uses the type of the first operand; callers are expected
     to have arranged compatible operand types (see
     int_binop_types_match_p).  */
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  /* Extend or truncate the second operand to the first's precision.  */
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* A negative shift count means a shift in the opposite
	 direction.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      /* Likewise, a negative rotate count rotates the other way.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    /* For all division and modulus variants, punt on a zero divisor
       rather than folding undefined behavior.  */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      /* Unsupported operation: not foldable here.  */
      return NULL_TREE;
    }

  /* Fit the result to TYPE, recording overflow when the computation
     overflowed for a signed type (or when OVERFLOWABLE is -1) or when
     either input already carried an overflow flag.  */
  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}
111938fd1498Szrj
112038fd1498Szrj /* Combine two integer constants PARG1 and PARG2 under operation CODE
112138fd1498Szrj to produce a new constant. Return NULL_TREE if we don't know how
112238fd1498Szrj to evaluate CODE at compile-time. */
112338fd1498Szrj
static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  /* The common case: both operands are plain INTEGER_CSTs.  */
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    return int_const_binop_2 (code, arg1, arg2, overflowable);

  /* Past this point at least one operand is a POLY_INT_CST, which can
     only happen on targets with more than one poly_int coefficient.  */
  gcc_assert (NUM_POLY_INT_COEFFS != 1);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      poly_wide_int res;
      bool overflow;
      tree type = TREE_TYPE (arg1);
      signop sign = TYPE_SIGN (type);
      switch (code)
	{
	case PLUS_EXPR:
	  res = wi::add (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MINUS_EXPR:
	  res = wi::sub (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MULT_EXPR:
	  /* Only poly-by-scalar multiplication stays polynomial; use
	     whichever operand is the scalar INTEGER_CST.  */
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg1),
			   wi::to_wide (arg2), sign, &overflow);
	  else if (TREE_CODE (arg1) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg2),
			   wi::to_wide (arg1), sign, &overflow);
	  else
	    return NULL_TREE;
	  break;

	case LSHIFT_EXPR:
	  /* Only a scalar shift amount is supported.  */
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
	  else
	    return NULL_TREE;
	  break;

	case BIT_IOR_EXPR:
	  /* can_ior_p also checks whether the IOR is representable as
	     a poly_int at all.  */
	  if (TREE_CODE (arg2) != INTEGER_CST
	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			     &res))
	    return NULL_TREE;
	  break;

	default:
	  return NULL_TREE;
	}
      /* Same overflow-flag policy as int_const_binop_2.  */
      return force_fit_type (type, res, overflowable,
			     (((sign == SIGNED || overflowable == -1)
			       && overflow)
			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
    }

  return NULL_TREE;
}
118738fd1498Szrj
118838fd1498Szrj tree
int_const_binop(enum tree_code code,const_tree arg1,const_tree arg2)118938fd1498Szrj int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
119038fd1498Szrj {
119138fd1498Szrj return int_const_binop_1 (code, arg1, arg2, 1);
119238fd1498Szrj }
119338fd1498Szrj
119438fd1498Szrj /* Return true if binary operation OP distributes over addition in operand
119538fd1498Szrj OPNO, with the other operand being held constant. OPNO counts from 1. */
119638fd1498Szrj
119738fd1498Szrj static bool
distributes_over_addition_p(tree_code op,int opno)119838fd1498Szrj distributes_over_addition_p (tree_code op, int opno)
119938fd1498Szrj {
120038fd1498Szrj switch (op)
120138fd1498Szrj {
120238fd1498Szrj case PLUS_EXPR:
120338fd1498Szrj case MINUS_EXPR:
120438fd1498Szrj case MULT_EXPR:
120538fd1498Szrj return true;
120638fd1498Szrj
120738fd1498Szrj case LSHIFT_EXPR:
120838fd1498Szrj return opno == 1;
120938fd1498Szrj
121038fd1498Szrj default:
121138fd1498Szrj return false;
121238fd1498Szrj }
121338fd1498Szrj }
121438fd1498Szrj
121538fd1498Szrj /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
121638fd1498Szrj constant. We assume ARG1 and ARG2 have the same data type, or at least
121738fd1498Szrj are the same kind of constant and the same machine mode. Return zero if
121838fd1498Szrj combining the constants is not allowed in the current operating mode. */
121938fd1498Szrj
122038fd1498Szrj static tree
const_binop(enum tree_code code,tree arg1,tree arg2)122138fd1498Szrj const_binop (enum tree_code code, tree arg1, tree arg2)
122238fd1498Szrj {
122338fd1498Szrj /* Sanity check for the recursive cases. */
122438fd1498Szrj if (!arg1 || !arg2)
122538fd1498Szrj return NULL_TREE;
122638fd1498Szrj
122738fd1498Szrj STRIP_NOPS (arg1);
122838fd1498Szrj STRIP_NOPS (arg2);
122938fd1498Szrj
123038fd1498Szrj if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
123138fd1498Szrj {
123238fd1498Szrj if (code == POINTER_PLUS_EXPR)
123338fd1498Szrj return int_const_binop (PLUS_EXPR,
123438fd1498Szrj arg1, fold_convert (TREE_TYPE (arg1), arg2));
123538fd1498Szrj
123638fd1498Szrj return int_const_binop (code, arg1, arg2);
123738fd1498Szrj }
123838fd1498Szrj
123938fd1498Szrj if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
124038fd1498Szrj {
124138fd1498Szrj machine_mode mode;
124238fd1498Szrj REAL_VALUE_TYPE d1;
124338fd1498Szrj REAL_VALUE_TYPE d2;
124438fd1498Szrj REAL_VALUE_TYPE value;
124538fd1498Szrj REAL_VALUE_TYPE result;
124638fd1498Szrj bool inexact;
124738fd1498Szrj tree t, type;
124838fd1498Szrj
124938fd1498Szrj /* The following codes are handled by real_arithmetic. */
125038fd1498Szrj switch (code)
125138fd1498Szrj {
125238fd1498Szrj case PLUS_EXPR:
125338fd1498Szrj case MINUS_EXPR:
125438fd1498Szrj case MULT_EXPR:
125538fd1498Szrj case RDIV_EXPR:
125638fd1498Szrj case MIN_EXPR:
125738fd1498Szrj case MAX_EXPR:
125838fd1498Szrj break;
125938fd1498Szrj
126038fd1498Szrj default:
126138fd1498Szrj return NULL_TREE;
126238fd1498Szrj }
126338fd1498Szrj
126438fd1498Szrj d1 = TREE_REAL_CST (arg1);
126538fd1498Szrj d2 = TREE_REAL_CST (arg2);
126638fd1498Szrj
126738fd1498Szrj type = TREE_TYPE (arg1);
126838fd1498Szrj mode = TYPE_MODE (type);
126938fd1498Szrj
127038fd1498Szrj /* Don't perform operation if we honor signaling NaNs and
127138fd1498Szrj either operand is a signaling NaN. */
127238fd1498Szrj if (HONOR_SNANS (mode)
127338fd1498Szrj && (REAL_VALUE_ISSIGNALING_NAN (d1)
127438fd1498Szrj || REAL_VALUE_ISSIGNALING_NAN (d2)))
127538fd1498Szrj return NULL_TREE;
127638fd1498Szrj
127738fd1498Szrj /* Don't perform operation if it would raise a division
127838fd1498Szrj by zero exception. */
127938fd1498Szrj if (code == RDIV_EXPR
128038fd1498Szrj && real_equal (&d2, &dconst0)
128138fd1498Szrj && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
128238fd1498Szrj return NULL_TREE;
128338fd1498Szrj
128438fd1498Szrj /* If either operand is a NaN, just return it. Otherwise, set up
128538fd1498Szrj for floating-point trap; we return an overflow. */
128638fd1498Szrj if (REAL_VALUE_ISNAN (d1))
128738fd1498Szrj {
128838fd1498Szrj /* Make resulting NaN value to be qNaN when flag_signaling_nans
128938fd1498Szrj is off. */
129038fd1498Szrj d1.signalling = 0;
129138fd1498Szrj t = build_real (type, d1);
129238fd1498Szrj return t;
129338fd1498Szrj }
129438fd1498Szrj else if (REAL_VALUE_ISNAN (d2))
129538fd1498Szrj {
129638fd1498Szrj /* Make resulting NaN value to be qNaN when flag_signaling_nans
129738fd1498Szrj is off. */
129838fd1498Szrj d2.signalling = 0;
129938fd1498Szrj t = build_real (type, d2);
130038fd1498Szrj return t;
130138fd1498Szrj }
130238fd1498Szrj
130338fd1498Szrj inexact = real_arithmetic (&value, code, &d1, &d2);
130438fd1498Szrj real_convert (&result, mode, &value);
130538fd1498Szrj
130638fd1498Szrj /* Don't constant fold this floating point operation if
130738fd1498Szrj the result has overflowed and flag_trapping_math. */
130838fd1498Szrj if (flag_trapping_math
130938fd1498Szrj && MODE_HAS_INFINITIES (mode)
131038fd1498Szrj && REAL_VALUE_ISINF (result)
131138fd1498Szrj && !REAL_VALUE_ISINF (d1)
131238fd1498Szrj && !REAL_VALUE_ISINF (d2))
131338fd1498Szrj return NULL_TREE;
131438fd1498Szrj
131538fd1498Szrj /* Don't constant fold this floating point operation if the
131638fd1498Szrj result may dependent upon the run-time rounding mode and
131738fd1498Szrj flag_rounding_math is set, or if GCC's software emulation
131838fd1498Szrj is unable to accurately represent the result. */
131938fd1498Szrj if ((flag_rounding_math
132038fd1498Szrj || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
132138fd1498Szrj && (inexact || !real_identical (&result, &value)))
132238fd1498Szrj return NULL_TREE;
132338fd1498Szrj
132438fd1498Szrj t = build_real (type, result);
132538fd1498Szrj
132638fd1498Szrj TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
132738fd1498Szrj return t;
132838fd1498Szrj }
132938fd1498Szrj
133038fd1498Szrj if (TREE_CODE (arg1) == FIXED_CST)
133138fd1498Szrj {
133238fd1498Szrj FIXED_VALUE_TYPE f1;
133338fd1498Szrj FIXED_VALUE_TYPE f2;
133438fd1498Szrj FIXED_VALUE_TYPE result;
133538fd1498Szrj tree t, type;
133638fd1498Szrj int sat_p;
133738fd1498Szrj bool overflow_p;
133838fd1498Szrj
133938fd1498Szrj /* The following codes are handled by fixed_arithmetic. */
134038fd1498Szrj switch (code)
134138fd1498Szrj {
134238fd1498Szrj case PLUS_EXPR:
134338fd1498Szrj case MINUS_EXPR:
134438fd1498Szrj case MULT_EXPR:
134538fd1498Szrj case TRUNC_DIV_EXPR:
134638fd1498Szrj if (TREE_CODE (arg2) != FIXED_CST)
134738fd1498Szrj return NULL_TREE;
134838fd1498Szrj f2 = TREE_FIXED_CST (arg2);
134938fd1498Szrj break;
135038fd1498Szrj
135138fd1498Szrj case LSHIFT_EXPR:
135238fd1498Szrj case RSHIFT_EXPR:
135338fd1498Szrj {
135438fd1498Szrj if (TREE_CODE (arg2) != INTEGER_CST)
135538fd1498Szrj return NULL_TREE;
135638fd1498Szrj wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
135738fd1498Szrj f2.data.high = w2.elt (1);
135838fd1498Szrj f2.data.low = w2.ulow ();
135938fd1498Szrj f2.mode = SImode;
136038fd1498Szrj }
136138fd1498Szrj break;
136238fd1498Szrj
136338fd1498Szrj default:
136438fd1498Szrj return NULL_TREE;
136538fd1498Szrj }
136638fd1498Szrj
136738fd1498Szrj f1 = TREE_FIXED_CST (arg1);
136838fd1498Szrj type = TREE_TYPE (arg1);
136938fd1498Szrj sat_p = TYPE_SATURATING (type);
137038fd1498Szrj overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
137138fd1498Szrj t = build_fixed (type, result);
137238fd1498Szrj /* Propagate overflow flags. */
137338fd1498Szrj if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
137438fd1498Szrj TREE_OVERFLOW (t) = 1;
137538fd1498Szrj return t;
137638fd1498Szrj }
137738fd1498Szrj
137838fd1498Szrj if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
137938fd1498Szrj {
138038fd1498Szrj tree type = TREE_TYPE (arg1);
138138fd1498Szrj tree r1 = TREE_REALPART (arg1);
138238fd1498Szrj tree i1 = TREE_IMAGPART (arg1);
138338fd1498Szrj tree r2 = TREE_REALPART (arg2);
138438fd1498Szrj tree i2 = TREE_IMAGPART (arg2);
138538fd1498Szrj tree real, imag;
138638fd1498Szrj
138738fd1498Szrj switch (code)
138838fd1498Szrj {
138938fd1498Szrj case PLUS_EXPR:
139038fd1498Szrj case MINUS_EXPR:
139138fd1498Szrj real = const_binop (code, r1, r2);
139238fd1498Szrj imag = const_binop (code, i1, i2);
139338fd1498Szrj break;
139438fd1498Szrj
139538fd1498Szrj case MULT_EXPR:
139638fd1498Szrj if (COMPLEX_FLOAT_TYPE_P (type))
139738fd1498Szrj return do_mpc_arg2 (arg1, arg2, type,
139838fd1498Szrj /* do_nonfinite= */ folding_initializer,
139938fd1498Szrj mpc_mul);
140038fd1498Szrj
140138fd1498Szrj real = const_binop (MINUS_EXPR,
140238fd1498Szrj const_binop (MULT_EXPR, r1, r2),
140338fd1498Szrj const_binop (MULT_EXPR, i1, i2));
140438fd1498Szrj imag = const_binop (PLUS_EXPR,
140538fd1498Szrj const_binop (MULT_EXPR, r1, i2),
140638fd1498Szrj const_binop (MULT_EXPR, i1, r2));
140738fd1498Szrj break;
140838fd1498Szrj
140938fd1498Szrj case RDIV_EXPR:
141038fd1498Szrj if (COMPLEX_FLOAT_TYPE_P (type))
141138fd1498Szrj return do_mpc_arg2 (arg1, arg2, type,
141238fd1498Szrj /* do_nonfinite= */ folding_initializer,
141338fd1498Szrj mpc_div);
141438fd1498Szrj /* Fallthru. */
141538fd1498Szrj case TRUNC_DIV_EXPR:
141638fd1498Szrj case CEIL_DIV_EXPR:
141738fd1498Szrj case FLOOR_DIV_EXPR:
141838fd1498Szrj case ROUND_DIV_EXPR:
141938fd1498Szrj if (flag_complex_method == 0)
142038fd1498Szrj {
142138fd1498Szrj /* Keep this algorithm in sync with
142238fd1498Szrj tree-complex.c:expand_complex_div_straight().
142338fd1498Szrj
142438fd1498Szrj Expand complex division to scalars, straightforward algorithm.
142538fd1498Szrj a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
142638fd1498Szrj t = br*br + bi*bi
142738fd1498Szrj */
142838fd1498Szrj tree magsquared
142938fd1498Szrj = const_binop (PLUS_EXPR,
143038fd1498Szrj const_binop (MULT_EXPR, r2, r2),
143138fd1498Szrj const_binop (MULT_EXPR, i2, i2));
143238fd1498Szrj tree t1
143338fd1498Szrj = const_binop (PLUS_EXPR,
143438fd1498Szrj const_binop (MULT_EXPR, r1, r2),
143538fd1498Szrj const_binop (MULT_EXPR, i1, i2));
143638fd1498Szrj tree t2
143738fd1498Szrj = const_binop (MINUS_EXPR,
143838fd1498Szrj const_binop (MULT_EXPR, i1, r2),
143938fd1498Szrj const_binop (MULT_EXPR, r1, i2));
144038fd1498Szrj
144138fd1498Szrj real = const_binop (code, t1, magsquared);
144238fd1498Szrj imag = const_binop (code, t2, magsquared);
144338fd1498Szrj }
144438fd1498Szrj else
144538fd1498Szrj {
144638fd1498Szrj /* Keep this algorithm in sync with
144738fd1498Szrj tree-complex.c:expand_complex_div_wide().
144838fd1498Szrj
144938fd1498Szrj Expand complex division to scalars, modified algorithm to minimize
145038fd1498Szrj overflow with wide input ranges. */
145138fd1498Szrj tree compare = fold_build2 (LT_EXPR, boolean_type_node,
145238fd1498Szrj fold_abs_const (r2, TREE_TYPE (type)),
145338fd1498Szrj fold_abs_const (i2, TREE_TYPE (type)));
145438fd1498Szrj
145538fd1498Szrj if (integer_nonzerop (compare))
145638fd1498Szrj {
145738fd1498Szrj /* In the TRUE branch, we compute
145838fd1498Szrj ratio = br/bi;
145938fd1498Szrj div = (br * ratio) + bi;
146038fd1498Szrj tr = (ar * ratio) + ai;
146138fd1498Szrj ti = (ai * ratio) - ar;
146238fd1498Szrj tr = tr / div;
146338fd1498Szrj ti = ti / div; */
146438fd1498Szrj tree ratio = const_binop (code, r2, i2);
146538fd1498Szrj tree div = const_binop (PLUS_EXPR, i2,
146638fd1498Szrj const_binop (MULT_EXPR, r2, ratio));
146738fd1498Szrj real = const_binop (MULT_EXPR, r1, ratio);
146838fd1498Szrj real = const_binop (PLUS_EXPR, real, i1);
146938fd1498Szrj real = const_binop (code, real, div);
147038fd1498Szrj
147138fd1498Szrj imag = const_binop (MULT_EXPR, i1, ratio);
147238fd1498Szrj imag = const_binop (MINUS_EXPR, imag, r1);
147338fd1498Szrj imag = const_binop (code, imag, div);
147438fd1498Szrj }
147538fd1498Szrj else
147638fd1498Szrj {
147738fd1498Szrj /* In the FALSE branch, we compute
147838fd1498Szrj ratio = d/c;
147938fd1498Szrj divisor = (d * ratio) + c;
148038fd1498Szrj tr = (b * ratio) + a;
148138fd1498Szrj ti = b - (a * ratio);
148238fd1498Szrj tr = tr / div;
148338fd1498Szrj ti = ti / div; */
148438fd1498Szrj tree ratio = const_binop (code, i2, r2);
148538fd1498Szrj tree div = const_binop (PLUS_EXPR, r2,
148638fd1498Szrj const_binop (MULT_EXPR, i2, ratio));
148738fd1498Szrj
148838fd1498Szrj real = const_binop (MULT_EXPR, i1, ratio);
148938fd1498Szrj real = const_binop (PLUS_EXPR, real, r1);
149038fd1498Szrj real = const_binop (code, real, div);
149138fd1498Szrj
149238fd1498Szrj imag = const_binop (MULT_EXPR, r1, ratio);
149338fd1498Szrj imag = const_binop (MINUS_EXPR, i1, imag);
149438fd1498Szrj imag = const_binop (code, imag, div);
149538fd1498Szrj }
149638fd1498Szrj }
149738fd1498Szrj break;
149838fd1498Szrj
149938fd1498Szrj default:
150038fd1498Szrj return NULL_TREE;
150138fd1498Szrj }
150238fd1498Szrj
150338fd1498Szrj if (real && imag)
150438fd1498Szrj return build_complex (type, real, imag);
150538fd1498Szrj }
150638fd1498Szrj
150738fd1498Szrj if (TREE_CODE (arg1) == VECTOR_CST
150838fd1498Szrj && TREE_CODE (arg2) == VECTOR_CST
150938fd1498Szrj && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
151038fd1498Szrj TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
151138fd1498Szrj {
151238fd1498Szrj tree type = TREE_TYPE (arg1);
151338fd1498Szrj bool step_ok_p;
151438fd1498Szrj if (VECTOR_CST_STEPPED_P (arg1)
151538fd1498Szrj && VECTOR_CST_STEPPED_P (arg2))
151638fd1498Szrj /* We can operate directly on the encoding if:
151738fd1498Szrj
151838fd1498Szrj a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
151938fd1498Szrj implies
152038fd1498Szrj (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
152138fd1498Szrj
152238fd1498Szrj Addition and subtraction are the supported operators
152338fd1498Szrj for which this is true. */
152438fd1498Szrj step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
152538fd1498Szrj else if (VECTOR_CST_STEPPED_P (arg1))
152638fd1498Szrj /* We can operate directly on stepped encodings if:
152738fd1498Szrj
152838fd1498Szrj a3 - a2 == a2 - a1
152938fd1498Szrj implies:
153038fd1498Szrj (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
153138fd1498Szrj
153238fd1498Szrj which is true if (x -> x op c) distributes over addition. */
153338fd1498Szrj step_ok_p = distributes_over_addition_p (code, 1);
153438fd1498Szrj else
153538fd1498Szrj /* Similarly in reverse. */
153638fd1498Szrj step_ok_p = distributes_over_addition_p (code, 2);
153738fd1498Szrj tree_vector_builder elts;
153838fd1498Szrj if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
153938fd1498Szrj return NULL_TREE;
154038fd1498Szrj unsigned int count = elts.encoded_nelts ();
154138fd1498Szrj for (unsigned int i = 0; i < count; ++i)
154238fd1498Szrj {
154338fd1498Szrj tree elem1 = VECTOR_CST_ELT (arg1, i);
154438fd1498Szrj tree elem2 = VECTOR_CST_ELT (arg2, i);
154538fd1498Szrj
154638fd1498Szrj tree elt = const_binop (code, elem1, elem2);
154738fd1498Szrj
154838fd1498Szrj /* It is possible that const_binop cannot handle the given
154938fd1498Szrj code and return NULL_TREE */
155038fd1498Szrj if (elt == NULL_TREE)
155138fd1498Szrj return NULL_TREE;
155238fd1498Szrj elts.quick_push (elt);
155338fd1498Szrj }
155438fd1498Szrj
155538fd1498Szrj return elts.build ();
155638fd1498Szrj }
155738fd1498Szrj
155838fd1498Szrj /* Shifts allow a scalar offset for a vector. */
155938fd1498Szrj if (TREE_CODE (arg1) == VECTOR_CST
156038fd1498Szrj && TREE_CODE (arg2) == INTEGER_CST)
156138fd1498Szrj {
156238fd1498Szrj tree type = TREE_TYPE (arg1);
156338fd1498Szrj bool step_ok_p = distributes_over_addition_p (code, 1);
156438fd1498Szrj tree_vector_builder elts;
156538fd1498Szrj if (!elts.new_unary_operation (type, arg1, step_ok_p))
156638fd1498Szrj return NULL_TREE;
156738fd1498Szrj unsigned int count = elts.encoded_nelts ();
156838fd1498Szrj for (unsigned int i = 0; i < count; ++i)
156938fd1498Szrj {
157038fd1498Szrj tree elem1 = VECTOR_CST_ELT (arg1, i);
157138fd1498Szrj
157238fd1498Szrj tree elt = const_binop (code, elem1, arg2);
157338fd1498Szrj
157438fd1498Szrj /* It is possible that const_binop cannot handle the given
157538fd1498Szrj code and return NULL_TREE. */
157638fd1498Szrj if (elt == NULL_TREE)
157738fd1498Szrj return NULL_TREE;
157838fd1498Szrj elts.quick_push (elt);
157938fd1498Szrj }
158038fd1498Szrj
158138fd1498Szrj return elts.build ();
158238fd1498Szrj }
158338fd1498Szrj return NULL_TREE;
158438fd1498Szrj }
158538fd1498Szrj
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  /* Comparison codes produce a result of TYPE (usually boolean), so
     they need the type-aware relational folder, not the worker.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      /* A series needs both the base (ARG1) and the step (ARG2) to be
	 constants before it can be folded to a VECTOR_CST or similar.  */
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      /* Pair two scalar constants of matching kind into a COMPLEX_CST.  */
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
	{
	  /* Subtract in offset_int precision and let force_fit_type
	     truncate to TYPE and merge the operands' overflow flags.  */
	  offset_int res = wi::sub (wi::to_offset (arg1),
				    wi::to_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	/* Variable-length vectors can't be folded element by element.  */
	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	/* Packing narrows each element, so the result holds the
	   elements of both inputs: twice as many as one input.  */
	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    /* First half comes from ARG1, second half from ARG2.  */
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	/* Widening doubles the element size, halving the element count.  */
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	/* LO/HI pick a contiguous half (endian-dependent); EVEN/ODD pick
	   every other element via a stride of 2 (SCALE == 1).  */
	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    /* Widen both operands first, then multiply in the wider type.  */
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  /* Everything else can use the worker, which derives the result type
     from the operands.  */
  return const_binop (code, arg1, arg2);
}
171338fd1498Szrj
171438fd1498Szrj /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
171538fd1498Szrj Return zero if computing the constants is not possible. */
171638fd1498Szrj
171738fd1498Szrj tree
const_unop(enum tree_code code,tree type,tree arg0)171838fd1498Szrj const_unop (enum tree_code code, tree type, tree arg0)
171938fd1498Szrj {
172038fd1498Szrj /* Don't perform the operation, other than NEGATE and ABS, if
172138fd1498Szrj flag_signaling_nans is on and the operand is a signaling NaN. */
172238fd1498Szrj if (TREE_CODE (arg0) == REAL_CST
172338fd1498Szrj && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
172438fd1498Szrj && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
172538fd1498Szrj && code != NEGATE_EXPR
172638fd1498Szrj && code != ABS_EXPR)
172738fd1498Szrj return NULL_TREE;
172838fd1498Szrj
172938fd1498Szrj switch (code)
173038fd1498Szrj {
173138fd1498Szrj CASE_CONVERT:
173238fd1498Szrj case FLOAT_EXPR:
173338fd1498Szrj case FIX_TRUNC_EXPR:
173438fd1498Szrj case FIXED_CONVERT_EXPR:
173538fd1498Szrj return fold_convert_const (code, type, arg0);
173638fd1498Szrj
173738fd1498Szrj case ADDR_SPACE_CONVERT_EXPR:
173838fd1498Szrj /* If the source address is 0, and the source address space
173938fd1498Szrj cannot have a valid object at 0, fold to dest type null. */
174038fd1498Szrj if (integer_zerop (arg0)
174138fd1498Szrj && !(targetm.addr_space.zero_address_valid
174238fd1498Szrj (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
174338fd1498Szrj return fold_convert_const (code, type, arg0);
174438fd1498Szrj break;
174538fd1498Szrj
174638fd1498Szrj case VIEW_CONVERT_EXPR:
174738fd1498Szrj return fold_view_convert_expr (type, arg0);
174838fd1498Szrj
174938fd1498Szrj case NEGATE_EXPR:
175038fd1498Szrj {
175138fd1498Szrj /* Can't call fold_negate_const directly here as that doesn't
175238fd1498Szrj handle all cases and we might not be able to negate some
175338fd1498Szrj constants. */
175438fd1498Szrj tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
175538fd1498Szrj if (tem && CONSTANT_CLASS_P (tem))
175638fd1498Szrj return tem;
175738fd1498Szrj break;
175838fd1498Szrj }
175938fd1498Szrj
176038fd1498Szrj case ABS_EXPR:
176138fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
176238fd1498Szrj return fold_abs_const (arg0, type);
176338fd1498Szrj break;
176438fd1498Szrj
176538fd1498Szrj case CONJ_EXPR:
176638fd1498Szrj if (TREE_CODE (arg0) == COMPLEX_CST)
176738fd1498Szrj {
176838fd1498Szrj tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
176938fd1498Szrj TREE_TYPE (type));
177038fd1498Szrj return build_complex (type, TREE_REALPART (arg0), ipart);
177138fd1498Szrj }
177238fd1498Szrj break;
177338fd1498Szrj
177438fd1498Szrj case BIT_NOT_EXPR:
177538fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST)
177638fd1498Szrj return fold_not_const (arg0, type);
177738fd1498Szrj else if (POLY_INT_CST_P (arg0))
177838fd1498Szrj return wide_int_to_tree (type, -poly_int_cst_value (arg0));
177938fd1498Szrj /* Perform BIT_NOT_EXPR on each element individually. */
178038fd1498Szrj else if (TREE_CODE (arg0) == VECTOR_CST)
178138fd1498Szrj {
178238fd1498Szrj tree elem;
178338fd1498Szrj
178438fd1498Szrj /* This can cope with stepped encodings because ~x == -1 - x. */
178538fd1498Szrj tree_vector_builder elements;
178638fd1498Szrj elements.new_unary_operation (type, arg0, true);
178738fd1498Szrj unsigned int i, count = elements.encoded_nelts ();
178838fd1498Szrj for (i = 0; i < count; ++i)
178938fd1498Szrj {
179038fd1498Szrj elem = VECTOR_CST_ELT (arg0, i);
179138fd1498Szrj elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
179238fd1498Szrj if (elem == NULL_TREE)
179338fd1498Szrj break;
179438fd1498Szrj elements.quick_push (elem);
179538fd1498Szrj }
179638fd1498Szrj if (i == count)
179738fd1498Szrj return elements.build ();
179838fd1498Szrj }
179938fd1498Szrj break;
180038fd1498Szrj
180138fd1498Szrj case TRUTH_NOT_EXPR:
180238fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST)
180338fd1498Szrj return constant_boolean_node (integer_zerop (arg0), type);
180438fd1498Szrj break;
180538fd1498Szrj
180638fd1498Szrj case REALPART_EXPR:
180738fd1498Szrj if (TREE_CODE (arg0) == COMPLEX_CST)
180838fd1498Szrj return fold_convert (type, TREE_REALPART (arg0));
180938fd1498Szrj break;
181038fd1498Szrj
181138fd1498Szrj case IMAGPART_EXPR:
181238fd1498Szrj if (TREE_CODE (arg0) == COMPLEX_CST)
181338fd1498Szrj return fold_convert (type, TREE_IMAGPART (arg0));
181438fd1498Szrj break;
181538fd1498Szrj
181638fd1498Szrj case VEC_UNPACK_LO_EXPR:
181738fd1498Szrj case VEC_UNPACK_HI_EXPR:
181838fd1498Szrj case VEC_UNPACK_FLOAT_LO_EXPR:
181938fd1498Szrj case VEC_UNPACK_FLOAT_HI_EXPR:
182038fd1498Szrj {
182138fd1498Szrj unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
182238fd1498Szrj enum tree_code subcode;
182338fd1498Szrj
182438fd1498Szrj if (TREE_CODE (arg0) != VECTOR_CST)
182538fd1498Szrj return NULL_TREE;
182638fd1498Szrj
182738fd1498Szrj if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
182838fd1498Szrj return NULL_TREE;
182938fd1498Szrj out_nelts = in_nelts / 2;
183038fd1498Szrj gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
183138fd1498Szrj
183238fd1498Szrj unsigned int offset = 0;
183338fd1498Szrj if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
183438fd1498Szrj || code == VEC_UNPACK_FLOAT_LO_EXPR))
183538fd1498Szrj offset = out_nelts;
183638fd1498Szrj
183738fd1498Szrj if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
183838fd1498Szrj subcode = NOP_EXPR;
183938fd1498Szrj else
184038fd1498Szrj subcode = FLOAT_EXPR;
184138fd1498Szrj
184238fd1498Szrj tree_vector_builder elts (type, out_nelts, 1);
184338fd1498Szrj for (i = 0; i < out_nelts; i++)
184438fd1498Szrj {
184538fd1498Szrj tree elt = fold_convert_const (subcode, TREE_TYPE (type),
184638fd1498Szrj VECTOR_CST_ELT (arg0, i + offset));
184738fd1498Szrj if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
184838fd1498Szrj return NULL_TREE;
184938fd1498Szrj elts.quick_push (elt);
185038fd1498Szrj }
185138fd1498Szrj
185238fd1498Szrj return elts.build ();
185338fd1498Szrj }
185438fd1498Szrj
185538fd1498Szrj case VEC_DUPLICATE_EXPR:
185638fd1498Szrj if (CONSTANT_CLASS_P (arg0))
185738fd1498Szrj return build_vector_from_val (type, arg0);
185838fd1498Szrj return NULL_TREE;
185938fd1498Szrj
186038fd1498Szrj default:
186138fd1498Szrj break;
186238fd1498Szrj }
186338fd1498Szrj
186438fd1498Szrj return NULL_TREE;
186538fd1498Szrj }
186638fd1498Szrj
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  /* sizetype_tab is indexed by size_type_kind and maps KIND to the
     corresponding sizetype variant (signed/unsigned, bits/bytes).  */
  return build_int_cst (sizetype_tab[(int) kind], number);
}
187538fd1498Szrj
187638fd1498Szrj /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
187738fd1498Szrj is a tree code. The type of the result is taken from the operands.
187838fd1498Szrj Both must be equivalent integer types, ala int_binop_types_match_p.
187938fd1498Szrj If the operands are constant, so is the result. */
188038fd1498Szrj
188138fd1498Szrj tree
size_binop_loc(location_t loc,enum tree_code code,tree arg0,tree arg1)188238fd1498Szrj size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
188338fd1498Szrj {
188438fd1498Szrj tree type = TREE_TYPE (arg0);
188538fd1498Szrj
188638fd1498Szrj if (arg0 == error_mark_node || arg1 == error_mark_node)
188738fd1498Szrj return error_mark_node;
188838fd1498Szrj
188938fd1498Szrj gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
189038fd1498Szrj TREE_TYPE (arg1)));
189138fd1498Szrj
189238fd1498Szrj /* Handle the special case of two poly_int constants faster. */
189338fd1498Szrj if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
189438fd1498Szrj {
189538fd1498Szrj /* And some specific cases even faster than that. */
189638fd1498Szrj if (code == PLUS_EXPR)
189738fd1498Szrj {
189838fd1498Szrj if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
189938fd1498Szrj return arg1;
190038fd1498Szrj if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
190138fd1498Szrj return arg0;
190238fd1498Szrj }
190338fd1498Szrj else if (code == MINUS_EXPR)
190438fd1498Szrj {
190538fd1498Szrj if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
190638fd1498Szrj return arg0;
190738fd1498Szrj }
190838fd1498Szrj else if (code == MULT_EXPR)
190938fd1498Szrj {
191038fd1498Szrj if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
191138fd1498Szrj return arg1;
191238fd1498Szrj }
191338fd1498Szrj
191438fd1498Szrj /* Handle general case of two integer constants. For sizetype
191538fd1498Szrj constant calculations we always want to know about overflow,
191638fd1498Szrj even in the unsigned case. */
191738fd1498Szrj tree res = int_const_binop_1 (code, arg0, arg1, -1);
191838fd1498Szrj if (res != NULL_TREE)
191938fd1498Szrj return res;
192038fd1498Szrj }
192138fd1498Szrj
192238fd1498Szrj return fold_build2_loc (loc, code, type, arg0, arg1);
192338fd1498Szrj }
192438fd1498Szrj
192538fd1498Szrj /* Given two values, either both of sizetype or both of bitsizetype,
192638fd1498Szrj compute the difference between the two values. Return the value
192738fd1498Szrj in signed type corresponding to the type of the operands. */
192838fd1498Szrj
192938fd1498Szrj tree
size_diffop_loc(location_t loc,tree arg0,tree arg1)193038fd1498Szrj size_diffop_loc (location_t loc, tree arg0, tree arg1)
193138fd1498Szrj {
193238fd1498Szrj tree type = TREE_TYPE (arg0);
193338fd1498Szrj tree ctype;
193438fd1498Szrj
193538fd1498Szrj gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
193638fd1498Szrj TREE_TYPE (arg1)));
193738fd1498Szrj
193838fd1498Szrj /* If the type is already signed, just do the simple thing. */
193938fd1498Szrj if (!TYPE_UNSIGNED (type))
194038fd1498Szrj return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
194138fd1498Szrj
194238fd1498Szrj if (type == sizetype)
194338fd1498Szrj ctype = ssizetype;
194438fd1498Szrj else if (type == bitsizetype)
194538fd1498Szrj ctype = sbitsizetype;
194638fd1498Szrj else
194738fd1498Szrj ctype = signed_type_for (type);
194838fd1498Szrj
194938fd1498Szrj /* If either operand is not a constant, do the conversions to the signed
195038fd1498Szrj type and subtract. The hardware will do the right thing with any
195138fd1498Szrj overflow in the subtraction. */
195238fd1498Szrj if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
195338fd1498Szrj return size_binop_loc (loc, MINUS_EXPR,
195438fd1498Szrj fold_convert_loc (loc, ctype, arg0),
195538fd1498Szrj fold_convert_loc (loc, ctype, arg1));
195638fd1498Szrj
195738fd1498Szrj /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
195838fd1498Szrj Otherwise, subtract the other way, convert to CTYPE (we know that can't
195938fd1498Szrj overflow) and negate (which can't either). Special-case a result
196038fd1498Szrj of zero while we're here. */
196138fd1498Szrj if (tree_int_cst_equal (arg0, arg1))
196238fd1498Szrj return build_int_cst (ctype, 0);
196338fd1498Szrj else if (tree_int_cst_lt (arg1, arg0))
196438fd1498Szrj return fold_convert_loc (loc, ctype,
196538fd1498Szrj size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
196638fd1498Szrj else
196738fd1498Szrj return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
196838fd1498Szrj fold_convert_loc (loc, ctype,
196938fd1498Szrj size_binop_loc (loc,
197038fd1498Szrj MINUS_EXPR,
197138fd1498Szrj arg1, arg0)));
197238fd1498Szrj }
197338fd1498Szrj
197438fd1498Szrj /* A subroutine of fold_convert_const handling conversions of an
197538fd1498Szrj INTEGER_CST to another integer type. */
197638fd1498Szrj
197738fd1498Szrj static tree
fold_convert_const_int_from_int(tree type,const_tree arg1)197838fd1498Szrj fold_convert_const_int_from_int (tree type, const_tree arg1)
197938fd1498Szrj {
198038fd1498Szrj /* Given an integer constant, make new constant with new type,
198138fd1498Szrj appropriately sign-extended or truncated. Use widest_int
198238fd1498Szrj so that any extension is done according ARG1's type. */
198338fd1498Szrj return force_fit_type (type, wi::to_widest (arg1),
198438fd1498Szrj !POINTER_TYPE_P (TREE_TYPE (arg1)),
198538fd1498Szrj TREE_OVERFLOW (arg1));
198638fd1498Szrj }
198738fd1498Szrj
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* Only truncation toward zero is supported here.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      /* Below TYPE's minimum: saturate to that minimum.  */
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      /* Above TYPE's maximum (when one exists): saturate to it.  */
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  /* In range: do the actual conversion.  */
  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  /* Propagate any saturation/NaN overflow along with ARG1's flag.  */
  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
206138fd1498Szrj
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      /* Shift out the fractional bits; arithmetic shift for signed
	 fixed-point modes, logical otherwise.  */
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* The fractional part occupies the whole representation, so the
	 integer part is zero.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  Flag overflow when a
     negative value is converted to a narrower-signedness type.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
210938fd1498Szrj
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  /* Round/convert the value into the target type's format.  */
  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    /* Otherwise just carry ARG1's overflow flag forward.  */
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
214838fd1498Szrj
214938fd1498Szrj /* A subroutine of fold_convert_const handling conversions a FIXED_CST
215038fd1498Szrj to a floating point type. */
215138fd1498Szrj
215238fd1498Szrj static tree
fold_convert_const_real_from_fixed(tree type,const_tree arg1)215338fd1498Szrj fold_convert_const_real_from_fixed (tree type, const_tree arg1)
215438fd1498Szrj {
215538fd1498Szrj REAL_VALUE_TYPE value;
215638fd1498Szrj tree t;
215738fd1498Szrj
215838fd1498Szrj real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
215938fd1498Szrj &TREE_FIXED_CST (arg1));
216038fd1498Szrj t = build_real (type, value);
216138fd1498Szrj
216238fd1498Szrj TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
216338fd1498Szrj return t;
216438fd1498Szrj }
216538fd1498Szrj
216638fd1498Szrj /* A subroutine of fold_convert_const handling conversions a FIXED_CST
216738fd1498Szrj to another fixed-point type. */
216838fd1498Szrj
216938fd1498Szrj static tree
fold_convert_const_fixed_from_fixed(tree type,const_tree arg1)217038fd1498Szrj fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
217138fd1498Szrj {
217238fd1498Szrj FIXED_VALUE_TYPE value;
217338fd1498Szrj tree t;
217438fd1498Szrj bool overflow_p;
217538fd1498Szrj
217638fd1498Szrj overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
217738fd1498Szrj &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
217838fd1498Szrj t = build_fixed (type, value);
217938fd1498Szrj
218038fd1498Szrj /* Propagate overflow flags. */
218138fd1498Szrj if (overflow_p | TREE_OVERFLOW (arg1))
218238fd1498Szrj TREE_OVERFLOW (t) = 1;
218338fd1498Szrj return t;
218438fd1498Szrj }
218538fd1498Szrj
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  /* fixed_convert_from_int takes a double_int, so the constant must
     fit in at most two HOST_WIDE_INT elements.  */
  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    /* Single element: sign-extend the low word into the high word.  */
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  /* Convert, honoring TYPE's saturation semantics.  */
  overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
221538fd1498Szrj
221638fd1498Szrj /* A subroutine of fold_convert_const handling conversions a REAL_CST
221738fd1498Szrj to a fixed-point type. */
221838fd1498Szrj
221938fd1498Szrj static tree
fold_convert_const_fixed_from_real(tree type,const_tree arg1)222038fd1498Szrj fold_convert_const_fixed_from_real (tree type, const_tree arg1)
222138fd1498Szrj {
222238fd1498Szrj FIXED_VALUE_TYPE value;
222338fd1498Szrj tree t;
222438fd1498Szrj bool overflow_p;
222538fd1498Szrj
222638fd1498Szrj overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
222738fd1498Szrj &TREE_REAL_CST (arg1),
222838fd1498Szrj TYPE_SATURATING (type));
222938fd1498Szrj t = build_fixed (type, value);
223038fd1498Szrj
223138fd1498Szrj /* Propagate overflow flags. */
223238fd1498Szrj if (overflow_p | TREE_OVERFLOW (arg1))
223338fd1498Szrj TREE_OVERFLOW (t) = 1;
223438fd1498Szrj return t;
223538fd1498Szrj }
223638fd1498Szrj
223738fd1498Szrj /* Attempt to fold type conversion operation CODE of expression ARG1 to
223838fd1498Szrj type TYPE. If no simplification can be done return NULL_TREE. */
223938fd1498Szrj
static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree arg_type = TREE_TYPE (arg1);
  /* Identical type: nothing to do.  */
  if (arg_type == type)
    return arg1;

  /* We can't widen types, since the runtime value could overflow the
     original type before being extended to the new type.  */
  if (POLY_INT_CST_P (arg1)
      && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
      && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
    return build_poly_int_cst (type,
			       poly_wide_int::from (poly_int_cst_value (arg1),
						    TYPE_PRECISION (type),
						    TYPE_SIGN (arg_type)));

  /* Dispatch on the target type, then on the constant's kind; each leaf
     helper handles one (target kind, constant kind) pair.  */
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* Vector-to-vector only, and only when the element counts are
	 known to match.  */
      if (TREE_CODE (arg1) == VECTOR_CST
	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
	{
	  tree elttype = TREE_TYPE (type);
	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
	  /* We can't handle steps directly when extending, since the
	     values need to wrap at the original precision first.  */
	  bool step_ok_p
	    = (INTEGRAL_TYPE_P (elttype)
	       && INTEGRAL_TYPE_P (arg1_elttype)
	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
	  tree_vector_builder v;
	  if (!v.new_unary_operation (type, arg1, step_ok_p))
	    return NULL_TREE;
	  /* Recursively convert each encoded element; bail out entirely
	     if any element fails to fold.  */
	  unsigned int len = v.encoded_nelts ();
	  for (unsigned int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v.quick_push (cvt);
	    }
	  return v.build ();
	}
    }
  /* No simplification possible.  */
  return NULL_TREE;
}
231538fd1498Szrj
231638fd1498Szrj /* Construct a vector of zero elements of vector type TYPE. */
231738fd1498Szrj
231838fd1498Szrj static tree
build_zero_vector(tree type)231938fd1498Szrj build_zero_vector (tree type)
232038fd1498Szrj {
232138fd1498Szrj tree t;
232238fd1498Szrj
232338fd1498Szrj t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
232438fd1498Szrj return build_vector_from_val (type, t);
232538fd1498Szrj }
232638fd1498Szrj
232738fd1498Szrj /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
232838fd1498Szrj
232938fd1498Szrj bool
fold_convertible_p(const_tree type,const_tree arg)233038fd1498Szrj fold_convertible_p (const_tree type, const_tree arg)
233138fd1498Szrj {
233238fd1498Szrj tree orig = TREE_TYPE (arg);
233338fd1498Szrj
233438fd1498Szrj if (type == orig)
233538fd1498Szrj return true;
233638fd1498Szrj
233738fd1498Szrj if (TREE_CODE (arg) == ERROR_MARK
233838fd1498Szrj || TREE_CODE (type) == ERROR_MARK
233938fd1498Szrj || TREE_CODE (orig) == ERROR_MARK)
234038fd1498Szrj return false;
234138fd1498Szrj
234238fd1498Szrj if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
234338fd1498Szrj return true;
234438fd1498Szrj
234538fd1498Szrj switch (TREE_CODE (type))
234638fd1498Szrj {
234738fd1498Szrj case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
234838fd1498Szrj case POINTER_TYPE: case REFERENCE_TYPE:
234938fd1498Szrj case OFFSET_TYPE:
235038fd1498Szrj return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
235138fd1498Szrj || TREE_CODE (orig) == OFFSET_TYPE);
235238fd1498Szrj
235338fd1498Szrj case REAL_TYPE:
235438fd1498Szrj case FIXED_POINT_TYPE:
235538fd1498Szrj case VECTOR_TYPE:
235638fd1498Szrj case VOID_TYPE:
235738fd1498Szrj return TREE_CODE (type) == TREE_CODE (orig);
235838fd1498Szrj
235938fd1498Szrj default:
236038fd1498Szrj return false;
236138fd1498Szrj }
236238fd1498Szrj }
236338fd1498Szrj
236438fd1498Szrj /* Convert expression ARG to type TYPE. Used by the middle-end for
236538fd1498Szrj simple conversions in preference to calling the front-end's convert. */
236638fd1498Szrj
tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  /* Converting to the identical type is a no-op.  */
  if (type == orig)
    return arg;

  /* Any ERROR_MARK poisons the result.  */
  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* Constants fold immediately when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      /* Complex converts via its real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      /* Only a same-size vector source remains legal here; reinterpret
	 the bits.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      /* Fold constant operands directly when possible; the expression
	 code depends on the constant's kind.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      /* Otherwise build the conversion expression appropriate to the
	 source type.  */
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Complex converts via its real part.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Complex converts via its real part.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  /* Scalar becomes the real part; imaginary part is zero.  */
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_convert_loc (loc, TREE_TYPE (type), arg),
			      fold_convert_loc (loc, TREE_TYPE (type),
					    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If the operand is already a COMPLEX_EXPR, convert the two
	       parts directly without materializing the operand.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise wrap ARG in SAVE_EXPR so extracting both parts
	       evaluates it only once.  */
	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      /* Otherwise this must be a bit-reinterpreting conversion from a
	 same-size scalar or vector.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  /* Shared exit for the fixed-point constant case: pin LOC onto the
     folded result.  */
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
253638fd1498Szrj
253738fd1498Szrj /* Return false if expr can be assumed not to be an lvalue, true
253838fd1498Szrj otherwise. */
253938fd1498Szrj
254038fd1498Szrj static bool
maybe_lvalue_p(const_tree x)254138fd1498Szrj maybe_lvalue_p (const_tree x)
254238fd1498Szrj {
254338fd1498Szrj /* We only need to wrap lvalue tree codes. */
254438fd1498Szrj switch (TREE_CODE (x))
254538fd1498Szrj {
254638fd1498Szrj case VAR_DECL:
254738fd1498Szrj case PARM_DECL:
254838fd1498Szrj case RESULT_DECL:
254938fd1498Szrj case LABEL_DECL:
255038fd1498Szrj case FUNCTION_DECL:
255138fd1498Szrj case SSA_NAME:
255238fd1498Szrj
255338fd1498Szrj case COMPONENT_REF:
255438fd1498Szrj case MEM_REF:
255538fd1498Szrj case INDIRECT_REF:
255638fd1498Szrj case ARRAY_REF:
255738fd1498Szrj case ARRAY_RANGE_REF:
255838fd1498Szrj case BIT_FIELD_REF:
255938fd1498Szrj case OBJ_TYPE_REF:
256038fd1498Szrj
256138fd1498Szrj case REALPART_EXPR:
256238fd1498Szrj case IMAGPART_EXPR:
256338fd1498Szrj case PREINCREMENT_EXPR:
256438fd1498Szrj case PREDECREMENT_EXPR:
256538fd1498Szrj case SAVE_EXPR:
256638fd1498Szrj case TRY_CATCH_EXPR:
256738fd1498Szrj case WITH_CLEANUP_EXPR:
256838fd1498Szrj case COMPOUND_EXPR:
256938fd1498Szrj case MODIFY_EXPR:
257038fd1498Szrj case TARGET_EXPR:
257138fd1498Szrj case COND_EXPR:
257238fd1498Szrj case BIND_EXPR:
257338fd1498Szrj break;
257438fd1498Szrj
257538fd1498Szrj default:
257638fd1498Szrj /* Assume the worst for front-end tree codes. */
257738fd1498Szrj if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
257838fd1498Szrj break;
257938fd1498Szrj return false;
258038fd1498Szrj }
258138fd1498Szrj
258238fd1498Szrj return true;
258338fd1498Szrj }
258438fd1498Szrj
258538fd1498Szrj /* Return an expr equal to X but certainly not valid as an lvalue. */
258638fd1498Szrj
258738fd1498Szrj tree
non_lvalue_loc(location_t loc,tree x)258838fd1498Szrj non_lvalue_loc (location_t loc, tree x)
258938fd1498Szrj {
259038fd1498Szrj /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
259138fd1498Szrj us. */
259238fd1498Szrj if (in_gimple_form)
259338fd1498Szrj return x;
259438fd1498Szrj
259538fd1498Szrj if (! maybe_lvalue_p (x))
259638fd1498Szrj return x;
259738fd1498Szrj return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
259838fd1498Szrj }
259938fd1498Szrj
260038fd1498Szrj /* When pedantic, return an expr equal to X but certainly not valid as a
260138fd1498Szrj pedantic lvalue. Otherwise, return X. */
260238fd1498Szrj
static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  /* Despite the name, no wrapper is built any more: this only pins
     location LOC onto X (unsharing X if necessary) and returns it.  */
  return protected_set_expr_location_unshare (x, loc);
}
260838fd1498Szrj
260938fd1498Szrj /* Given a tree comparison code, return the code that is the logical inverse.
261038fd1498Szrj It is generally not safe to do this for floating-point comparisons, except
261138fd1498Szrj for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
261238fd1498Szrj ERROR_MARK in this case. */
261338fd1498Szrj
261438fd1498Szrj enum tree_code
invert_tree_comparison(enum tree_code code,bool honor_nans)261538fd1498Szrj invert_tree_comparison (enum tree_code code, bool honor_nans)
261638fd1498Szrj {
261738fd1498Szrj if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
261838fd1498Szrj && code != ORDERED_EXPR && code != UNORDERED_EXPR)
261938fd1498Szrj return ERROR_MARK;
262038fd1498Szrj
262138fd1498Szrj switch (code)
262238fd1498Szrj {
262338fd1498Szrj case EQ_EXPR:
262438fd1498Szrj return NE_EXPR;
262538fd1498Szrj case NE_EXPR:
262638fd1498Szrj return EQ_EXPR;
262738fd1498Szrj case GT_EXPR:
262838fd1498Szrj return honor_nans ? UNLE_EXPR : LE_EXPR;
262938fd1498Szrj case GE_EXPR:
263038fd1498Szrj return honor_nans ? UNLT_EXPR : LT_EXPR;
263138fd1498Szrj case LT_EXPR:
263238fd1498Szrj return honor_nans ? UNGE_EXPR : GE_EXPR;
263338fd1498Szrj case LE_EXPR:
263438fd1498Szrj return honor_nans ? UNGT_EXPR : GT_EXPR;
263538fd1498Szrj case LTGT_EXPR:
263638fd1498Szrj return UNEQ_EXPR;
263738fd1498Szrj case UNEQ_EXPR:
263838fd1498Szrj return LTGT_EXPR;
263938fd1498Szrj case UNGT_EXPR:
264038fd1498Szrj return LE_EXPR;
264138fd1498Szrj case UNGE_EXPR:
264238fd1498Szrj return LT_EXPR;
264338fd1498Szrj case UNLT_EXPR:
264438fd1498Szrj return GE_EXPR;
264538fd1498Szrj case UNLE_EXPR:
264638fd1498Szrj return GT_EXPR;
264738fd1498Szrj case ORDERED_EXPR:
264838fd1498Szrj return UNORDERED_EXPR;
264938fd1498Szrj case UNORDERED_EXPR:
265038fd1498Szrj return ORDERED_EXPR;
265138fd1498Szrj default:
265238fd1498Szrj gcc_unreachable ();
265338fd1498Szrj }
265438fd1498Szrj }
265538fd1498Szrj
265638fd1498Szrj /* Similar, but return the comparison that results if the operands are
265738fd1498Szrj swapped. This is safe for floating-point. */
265838fd1498Szrj
265938fd1498Szrj enum tree_code
swap_tree_comparison(enum tree_code code)266038fd1498Szrj swap_tree_comparison (enum tree_code code)
266138fd1498Szrj {
266238fd1498Szrj switch (code)
266338fd1498Szrj {
266438fd1498Szrj case EQ_EXPR:
266538fd1498Szrj case NE_EXPR:
266638fd1498Szrj case ORDERED_EXPR:
266738fd1498Szrj case UNORDERED_EXPR:
266838fd1498Szrj case LTGT_EXPR:
266938fd1498Szrj case UNEQ_EXPR:
267038fd1498Szrj return code;
267138fd1498Szrj case GT_EXPR:
267238fd1498Szrj return LT_EXPR;
267338fd1498Szrj case GE_EXPR:
267438fd1498Szrj return LE_EXPR;
267538fd1498Szrj case LT_EXPR:
267638fd1498Szrj return GT_EXPR;
267738fd1498Szrj case LE_EXPR:
267838fd1498Szrj return GE_EXPR;
267938fd1498Szrj case UNGT_EXPR:
268038fd1498Szrj return UNLT_EXPR;
268138fd1498Szrj case UNGE_EXPR:
268238fd1498Szrj return UNLE_EXPR;
268338fd1498Szrj case UNLT_EXPR:
268438fd1498Szrj return UNGT_EXPR;
268538fd1498Szrj case UNLE_EXPR:
268638fd1498Szrj return UNGE_EXPR;
268738fd1498Szrj default:
268838fd1498Szrj gcc_unreachable ();
268938fd1498Szrj }
269038fd1498Szrj }
269138fd1498Szrj
269238fd1498Szrj
269338fd1498Szrj /* Convert a comparison tree code from an enum tree_code representation
269438fd1498Szrj into a compcode bit-based encoding. This function is the inverse of
269538fd1498Szrj compcode_to_comparison. */
269638fd1498Szrj
269738fd1498Szrj static enum comparison_code
comparison_to_compcode(enum tree_code code)269838fd1498Szrj comparison_to_compcode (enum tree_code code)
269938fd1498Szrj {
270038fd1498Szrj switch (code)
270138fd1498Szrj {
270238fd1498Szrj case LT_EXPR:
270338fd1498Szrj return COMPCODE_LT;
270438fd1498Szrj case EQ_EXPR:
270538fd1498Szrj return COMPCODE_EQ;
270638fd1498Szrj case LE_EXPR:
270738fd1498Szrj return COMPCODE_LE;
270838fd1498Szrj case GT_EXPR:
270938fd1498Szrj return COMPCODE_GT;
271038fd1498Szrj case NE_EXPR:
271138fd1498Szrj return COMPCODE_NE;
271238fd1498Szrj case GE_EXPR:
271338fd1498Szrj return COMPCODE_GE;
271438fd1498Szrj case ORDERED_EXPR:
271538fd1498Szrj return COMPCODE_ORD;
271638fd1498Szrj case UNORDERED_EXPR:
271738fd1498Szrj return COMPCODE_UNORD;
271838fd1498Szrj case UNLT_EXPR:
271938fd1498Szrj return COMPCODE_UNLT;
272038fd1498Szrj case UNEQ_EXPR:
272138fd1498Szrj return COMPCODE_UNEQ;
272238fd1498Szrj case UNLE_EXPR:
272338fd1498Szrj return COMPCODE_UNLE;
272438fd1498Szrj case UNGT_EXPR:
272538fd1498Szrj return COMPCODE_UNGT;
272638fd1498Szrj case LTGT_EXPR:
272738fd1498Szrj return COMPCODE_LTGT;
272838fd1498Szrj case UNGE_EXPR:
272938fd1498Szrj return COMPCODE_UNGE;
273038fd1498Szrj default:
273138fd1498Szrj gcc_unreachable ();
273238fd1498Szrj }
273338fd1498Szrj }
273438fd1498Szrj
273538fd1498Szrj /* Convert a compcode bit-based encoding of a comparison operator back
273638fd1498Szrj to GCC's enum tree_code representation. This function is the
273738fd1498Szrj inverse of comparison_to_compcode. */
273838fd1498Szrj
273938fd1498Szrj static enum tree_code
compcode_to_comparison(enum comparison_code code)274038fd1498Szrj compcode_to_comparison (enum comparison_code code)
274138fd1498Szrj {
274238fd1498Szrj switch (code)
274338fd1498Szrj {
274438fd1498Szrj case COMPCODE_LT:
274538fd1498Szrj return LT_EXPR;
274638fd1498Szrj case COMPCODE_EQ:
274738fd1498Szrj return EQ_EXPR;
274838fd1498Szrj case COMPCODE_LE:
274938fd1498Szrj return LE_EXPR;
275038fd1498Szrj case COMPCODE_GT:
275138fd1498Szrj return GT_EXPR;
275238fd1498Szrj case COMPCODE_NE:
275338fd1498Szrj return NE_EXPR;
275438fd1498Szrj case COMPCODE_GE:
275538fd1498Szrj return GE_EXPR;
275638fd1498Szrj case COMPCODE_ORD:
275738fd1498Szrj return ORDERED_EXPR;
275838fd1498Szrj case COMPCODE_UNORD:
275938fd1498Szrj return UNORDERED_EXPR;
276038fd1498Szrj case COMPCODE_UNLT:
276138fd1498Szrj return UNLT_EXPR;
276238fd1498Szrj case COMPCODE_UNEQ:
276338fd1498Szrj return UNEQ_EXPR;
276438fd1498Szrj case COMPCODE_UNLE:
276538fd1498Szrj return UNLE_EXPR;
276638fd1498Szrj case COMPCODE_UNGT:
276738fd1498Szrj return UNGT_EXPR;
276838fd1498Szrj case COMPCODE_LTGT:
276938fd1498Szrj return LTGT_EXPR;
277038fd1498Szrj case COMPCODE_UNGE:
277138fd1498Szrj return UNGE_EXPR;
277238fd1498Szrj default:
277338fd1498Szrj gcc_unreachable ();
277438fd1498Szrj }
277538fd1498Szrj }
277638fd1498Szrj
277738fd1498Szrj /* Return a tree for the comparison which is the combination of
277838fd1498Szrj doing the AND or OR (depending on CODE) of the two operations LCODE
277938fd1498Szrj and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
278038fd1498Szrj the possibility of trapping if the mode has NaNs, and return NULL_TREE
278138fd1498Szrj if this makes the transformation invalid. */
278238fd1498Szrj
tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* In the bit-based encoding, AND intersects the satisfying
     orderings and OR unions them.  Any other combiner is out of
     scope.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on NaN operands
	 unless it is EQ, NE, ORDERED, UNORDERED, or an UN* variant.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* Degenerate combinations fold to a boolean constant; anything else
     is translated back into a single comparison tree.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
286538fd1498Szrj
286638fd1498Szrj /* Return nonzero if two operands (typically of the same tree node)
286738fd1498Szrj are necessarily equal. FLAGS modifies behavior as follows:
286838fd1498Szrj
286938fd1498Szrj If OEP_ONLY_CONST is set, only return nonzero for constants.
287038fd1498Szrj This function tests whether the operands are indistinguishable;
287138fd1498Szrj it does not test whether they are equal using C's == operation.
287238fd1498Szrj The distinction is important for IEEE floating point, because
287338fd1498Szrj (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
287438fd1498Szrj (2) two NaNs may be indistinguishable, but NaN!=NaN.
287538fd1498Szrj
287638fd1498Szrj If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
287738fd1498Szrj even though it may hold multiple values during a function.
287838fd1498Szrj This is because a GCC tree node guarantees that nothing else is
287938fd1498Szrj executed between the evaluation of its "operands" (which may often
288038fd1498Szrj be evaluated in arbitrary order). Hence if the operands themselves
288138fd1498Szrj don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
288238fd1498Szrj same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
288338fd1498Szrj unset means assuming isochronic (or instantaneous) tree equivalence.
288438fd1498Szrj Unless comparing arbitrary expression trees, such as from different
288538fd1498Szrj statements, this flag can usually be left unset.
288638fd1498Szrj
288738fd1498Szrj If OEP_PURE_SAME is set, then pure functions with identical arguments
288838fd1498Szrj are considered the same. It is used when the caller has other ways
288938fd1498Szrj to ensure that global memory is unchanged in between.
289038fd1498Szrj
289138fd1498Szrj If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
289238fd1498Szrj not values of expressions.
289338fd1498Szrj
289438fd1498Szrj If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
289538fd1498Szrj such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
289638fd1498Szrj
289738fd1498Szrj Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
289838fd1498Szrj any operand with side effect. This is unnecesarily conservative in the
289938fd1498Szrj case we know that arg0 and arg1 are in disjoint code paths (such as in
290038fd1498Szrj ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
290138fd1498Szrj addresses with TREE_CONSTANT flag set so we know that &var == &var
290238fd1498Szrj even if var is volatile. */
290338fd1498Szrj
290438fd1498Szrj int
operand_equal_p(const_tree arg0,const_tree arg1,unsigned int flags)290538fd1498Szrj operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
290638fd1498Szrj {
290738fd1498Szrj /* When checking, verify at the outermost operand_equal_p call that
290838fd1498Szrj if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
290938fd1498Szrj hash value. */
291038fd1498Szrj if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
291138fd1498Szrj {
291238fd1498Szrj if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
291338fd1498Szrj {
291438fd1498Szrj if (arg0 != arg1)
291538fd1498Szrj {
291638fd1498Szrj inchash::hash hstate0 (0), hstate1 (0);
291738fd1498Szrj inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
291838fd1498Szrj inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
291938fd1498Szrj hashval_t h0 = hstate0.end ();
292038fd1498Szrj hashval_t h1 = hstate1.end ();
292138fd1498Szrj gcc_assert (h0 == h1);
292238fd1498Szrj }
292338fd1498Szrj return 1;
292438fd1498Szrj }
292538fd1498Szrj else
292638fd1498Szrj return 0;
292738fd1498Szrj }
292838fd1498Szrj
292938fd1498Szrj /* If either is ERROR_MARK, they aren't equal. */
293038fd1498Szrj if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
293138fd1498Szrj || TREE_TYPE (arg0) == error_mark_node
293238fd1498Szrj || TREE_TYPE (arg1) == error_mark_node)
293338fd1498Szrj return 0;
293438fd1498Szrj
293538fd1498Szrj /* Similar, if either does not have a type (like a released SSA name),
293638fd1498Szrj they aren't equal. */
293738fd1498Szrj if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
293838fd1498Szrj return 0;
293938fd1498Szrj
294038fd1498Szrj /* We cannot consider pointers to different address space equal. */
294138fd1498Szrj if (POINTER_TYPE_P (TREE_TYPE (arg0))
294238fd1498Szrj && POINTER_TYPE_P (TREE_TYPE (arg1))
294338fd1498Szrj && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
294438fd1498Szrj != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
294538fd1498Szrj return 0;
294638fd1498Szrj
294738fd1498Szrj /* Check equality of integer constants before bailing out due to
294838fd1498Szrj precision differences. */
294938fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
295038fd1498Szrj {
295138fd1498Szrj /* Address of INTEGER_CST is not defined; check that we did not forget
295238fd1498Szrj to drop the OEP_ADDRESS_OF flags. */
295338fd1498Szrj gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
295438fd1498Szrj return tree_int_cst_equal (arg0, arg1);
295538fd1498Szrj }
295638fd1498Szrj
295738fd1498Szrj if (!(flags & OEP_ADDRESS_OF))
295838fd1498Szrj {
295938fd1498Szrj /* If both types don't have the same signedness, then we can't consider
296038fd1498Szrj them equal. We must check this before the STRIP_NOPS calls
296138fd1498Szrj because they may change the signedness of the arguments. As pointers
296238fd1498Szrj strictly don't have a signedness, require either two pointers or
296338fd1498Szrj two non-pointers as well. */
296438fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
296538fd1498Szrj || POINTER_TYPE_P (TREE_TYPE (arg0))
296638fd1498Szrj != POINTER_TYPE_P (TREE_TYPE (arg1)))
296738fd1498Szrj return 0;
296838fd1498Szrj
296938fd1498Szrj /* If both types don't have the same precision, then it is not safe
297038fd1498Szrj to strip NOPs. */
297138fd1498Szrj if (element_precision (TREE_TYPE (arg0))
297238fd1498Szrj != element_precision (TREE_TYPE (arg1)))
297338fd1498Szrj return 0;
297438fd1498Szrj
297538fd1498Szrj STRIP_NOPS (arg0);
297638fd1498Szrj STRIP_NOPS (arg1);
297738fd1498Szrj }
297838fd1498Szrj #if 0
297938fd1498Szrj /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
298038fd1498Szrj sanity check once the issue is solved. */
298138fd1498Szrj else
298238fd1498Szrj /* Addresses of conversions and SSA_NAMEs (and many other things)
298338fd1498Szrj are not defined. Check that we did not forget to drop the
298438fd1498Szrj OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
298538fd1498Szrj gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
298638fd1498Szrj && TREE_CODE (arg0) != SSA_NAME);
298738fd1498Szrj #endif
298838fd1498Szrj
298938fd1498Szrj /* In case both args are comparisons but with different comparison
299038fd1498Szrj code, try to swap the comparison operands of one arg to produce
299138fd1498Szrj a match and compare that variant. */
299238fd1498Szrj if (TREE_CODE (arg0) != TREE_CODE (arg1)
299338fd1498Szrj && COMPARISON_CLASS_P (arg0)
299438fd1498Szrj && COMPARISON_CLASS_P (arg1))
299538fd1498Szrj {
299638fd1498Szrj enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
299738fd1498Szrj
299838fd1498Szrj if (TREE_CODE (arg0) == swap_code)
299938fd1498Szrj return operand_equal_p (TREE_OPERAND (arg0, 0),
300038fd1498Szrj TREE_OPERAND (arg1, 1), flags)
300138fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 1),
300238fd1498Szrj TREE_OPERAND (arg1, 0), flags);
300338fd1498Szrj }
300438fd1498Szrj
300538fd1498Szrj if (TREE_CODE (arg0) != TREE_CODE (arg1))
300638fd1498Szrj {
300738fd1498Szrj /* NOP_EXPR and CONVERT_EXPR are considered equal. */
300838fd1498Szrj if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
300938fd1498Szrj ;
301038fd1498Szrj else if (flags & OEP_ADDRESS_OF)
301138fd1498Szrj {
301238fd1498Szrj /* If we are interested in comparing addresses ignore
301338fd1498Szrj MEM_REF wrappings of the base that can appear just for
301438fd1498Szrj TBAA reasons. */
301538fd1498Szrj if (TREE_CODE (arg0) == MEM_REF
301638fd1498Szrj && DECL_P (arg1)
301738fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
301838fd1498Szrj && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
301938fd1498Szrj && integer_zerop (TREE_OPERAND (arg0, 1)))
302038fd1498Szrj return 1;
302138fd1498Szrj else if (TREE_CODE (arg1) == MEM_REF
302238fd1498Szrj && DECL_P (arg0)
302338fd1498Szrj && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
302438fd1498Szrj && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
302538fd1498Szrj && integer_zerop (TREE_OPERAND (arg1, 1)))
302638fd1498Szrj return 1;
302738fd1498Szrj return 0;
302838fd1498Szrj }
302938fd1498Szrj else
303038fd1498Szrj return 0;
303138fd1498Szrj }
303238fd1498Szrj
303338fd1498Szrj   /* When not checking addresses, this is needed for conversions and for
303438fd1498Szrj COMPONENT_REF. Might as well play it safe and always test this. */
303538fd1498Szrj if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
303638fd1498Szrj || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
303738fd1498Szrj || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
303838fd1498Szrj && !(flags & OEP_ADDRESS_OF)))
303938fd1498Szrj return 0;
304038fd1498Szrj
304138fd1498Szrj /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
304238fd1498Szrj We don't care about side effects in that case because the SAVE_EXPR
304338fd1498Szrj takes care of that for us. In all other cases, two expressions are
304438fd1498Szrj equal if they have no side effects. If we have two identical
304538fd1498Szrj expressions with side effects that should be treated the same due
304638fd1498Szrj to the only side effects being identical SAVE_EXPR's, that will
304738fd1498Szrj be detected in the recursive calls below.
304838fd1498Szrj If we are taking an invariant address of two identical objects
304938fd1498Szrj they are necessarily equal as well. */
305038fd1498Szrj if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
305138fd1498Szrj && (TREE_CODE (arg0) == SAVE_EXPR
305238fd1498Szrj || (flags & OEP_MATCH_SIDE_EFFECTS)
305338fd1498Szrj || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
305438fd1498Szrj return 1;
305538fd1498Szrj
305638fd1498Szrj /* Next handle constant cases, those for which we can return 1 even
305738fd1498Szrj if ONLY_CONST is set. */
305838fd1498Szrj if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
305938fd1498Szrj switch (TREE_CODE (arg0))
306038fd1498Szrj {
306138fd1498Szrj case INTEGER_CST:
306238fd1498Szrj return tree_int_cst_equal (arg0, arg1);
306338fd1498Szrj
306438fd1498Szrj case FIXED_CST:
306538fd1498Szrj return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
306638fd1498Szrj TREE_FIXED_CST (arg1));
306738fd1498Szrj
306838fd1498Szrj case REAL_CST:
306938fd1498Szrj if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
307038fd1498Szrj return 1;
307138fd1498Szrj
307238fd1498Szrj
307338fd1498Szrj if (!HONOR_SIGNED_ZEROS (arg0))
307438fd1498Szrj {
307538fd1498Szrj /* If we do not distinguish between signed and unsigned zero,
307638fd1498Szrj consider them equal. */
307738fd1498Szrj if (real_zerop (arg0) && real_zerop (arg1))
307838fd1498Szrj return 1;
307938fd1498Szrj }
308038fd1498Szrj return 0;
308138fd1498Szrj
308238fd1498Szrj case VECTOR_CST:
308338fd1498Szrj {
308438fd1498Szrj if (VECTOR_CST_LOG2_NPATTERNS (arg0)
308538fd1498Szrj != VECTOR_CST_LOG2_NPATTERNS (arg1))
308638fd1498Szrj return 0;
308738fd1498Szrj
308838fd1498Szrj if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
308938fd1498Szrj != VECTOR_CST_NELTS_PER_PATTERN (arg1))
309038fd1498Szrj return 0;
309138fd1498Szrj
309238fd1498Szrj unsigned int count = vector_cst_encoded_nelts (arg0);
309338fd1498Szrj for (unsigned int i = 0; i < count; ++i)
309438fd1498Szrj if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
309538fd1498Szrj VECTOR_CST_ENCODED_ELT (arg1, i), flags))
309638fd1498Szrj return 0;
309738fd1498Szrj return 1;
309838fd1498Szrj }
309938fd1498Szrj
310038fd1498Szrj case COMPLEX_CST:
310138fd1498Szrj return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
310238fd1498Szrj flags)
310338fd1498Szrj && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
310438fd1498Szrj flags));
310538fd1498Szrj
310638fd1498Szrj case STRING_CST:
310738fd1498Szrj return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
310838fd1498Szrj && ! memcmp (TREE_STRING_POINTER (arg0),
310938fd1498Szrj TREE_STRING_POINTER (arg1),
311038fd1498Szrj TREE_STRING_LENGTH (arg0)));
311138fd1498Szrj
311238fd1498Szrj case ADDR_EXPR:
311338fd1498Szrj gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
311438fd1498Szrj return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
311538fd1498Szrj flags | OEP_ADDRESS_OF
311638fd1498Szrj | OEP_MATCH_SIDE_EFFECTS);
311738fd1498Szrj case CONSTRUCTOR:
311838fd1498Szrj /* In GIMPLE empty constructors are allowed in initializers of
311938fd1498Szrj aggregates. */
312038fd1498Szrj return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
312138fd1498Szrj default:
312238fd1498Szrj break;
312338fd1498Szrj }
312438fd1498Szrj
312538fd1498Szrj if (flags & OEP_ONLY_CONST)
312638fd1498Szrj return 0;
312738fd1498Szrj
312838fd1498Szrj /* Define macros to test an operand from arg0 and arg1 for equality and a
312938fd1498Szrj variant that allows null and views null as being different from any
313038fd1498Szrj non-null value. In the latter case, if either is null, the both
313138fd1498Szrj must be; otherwise, do the normal comparison. */
313238fd1498Szrj #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
313338fd1498Szrj TREE_OPERAND (arg1, N), flags)
313438fd1498Szrj
313538fd1498Szrj #define OP_SAME_WITH_NULL(N) \
313638fd1498Szrj ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
313738fd1498Szrj ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
313838fd1498Szrj
313938fd1498Szrj switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
314038fd1498Szrj {
314138fd1498Szrj case tcc_unary:
314238fd1498Szrj /* Two conversions are equal only if signedness and modes match. */
314338fd1498Szrj switch (TREE_CODE (arg0))
314438fd1498Szrj {
314538fd1498Szrj CASE_CONVERT:
314638fd1498Szrj case FIX_TRUNC_EXPR:
314738fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (arg0))
314838fd1498Szrj != TYPE_UNSIGNED (TREE_TYPE (arg1)))
314938fd1498Szrj return 0;
315038fd1498Szrj break;
315138fd1498Szrj default:
315238fd1498Szrj break;
315338fd1498Szrj }
315438fd1498Szrj
315538fd1498Szrj return OP_SAME (0);
315638fd1498Szrj
315738fd1498Szrj
315838fd1498Szrj case tcc_comparison:
315938fd1498Szrj case tcc_binary:
316038fd1498Szrj if (OP_SAME (0) && OP_SAME (1))
316138fd1498Szrj return 1;
316238fd1498Szrj
316338fd1498Szrj /* For commutative ops, allow the other order. */
316438fd1498Szrj return (commutative_tree_code (TREE_CODE (arg0))
316538fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0),
316638fd1498Szrj TREE_OPERAND (arg1, 1), flags)
316738fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 1),
316838fd1498Szrj TREE_OPERAND (arg1, 0), flags));
316938fd1498Szrj
317038fd1498Szrj case tcc_reference:
317138fd1498Szrj /* If either of the pointer (or reference) expressions we are
317238fd1498Szrj dereferencing contain a side effect, these cannot be equal,
317338fd1498Szrj but their addresses can be. */
317438fd1498Szrj if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
317538fd1498Szrj && (TREE_SIDE_EFFECTS (arg0)
317638fd1498Szrj || TREE_SIDE_EFFECTS (arg1)))
317738fd1498Szrj return 0;
317838fd1498Szrj
317938fd1498Szrj switch (TREE_CODE (arg0))
318038fd1498Szrj {
318138fd1498Szrj case INDIRECT_REF:
318238fd1498Szrj if (!(flags & OEP_ADDRESS_OF)
318338fd1498Szrj && (TYPE_ALIGN (TREE_TYPE (arg0))
318438fd1498Szrj != TYPE_ALIGN (TREE_TYPE (arg1))))
318538fd1498Szrj return 0;
318638fd1498Szrj flags &= ~OEP_ADDRESS_OF;
318738fd1498Szrj return OP_SAME (0);
318838fd1498Szrj
318938fd1498Szrj case IMAGPART_EXPR:
319038fd1498Szrj /* Require the same offset. */
319138fd1498Szrj if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
319238fd1498Szrj TYPE_SIZE (TREE_TYPE (arg1)),
319338fd1498Szrj flags & ~OEP_ADDRESS_OF))
319438fd1498Szrj return 0;
319538fd1498Szrj
319638fd1498Szrj /* Fallthru. */
319738fd1498Szrj case REALPART_EXPR:
319838fd1498Szrj case VIEW_CONVERT_EXPR:
319938fd1498Szrj return OP_SAME (0);
320038fd1498Szrj
320138fd1498Szrj case TARGET_MEM_REF:
320238fd1498Szrj case MEM_REF:
320338fd1498Szrj if (!(flags & OEP_ADDRESS_OF))
320438fd1498Szrj {
320538fd1498Szrj /* Require equal access sizes */
320638fd1498Szrj if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
320738fd1498Szrj && (!TYPE_SIZE (TREE_TYPE (arg0))
320838fd1498Szrj || !TYPE_SIZE (TREE_TYPE (arg1))
320938fd1498Szrj || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
321038fd1498Szrj TYPE_SIZE (TREE_TYPE (arg1)),
321138fd1498Szrj flags)))
321238fd1498Szrj return 0;
321338fd1498Szrj /* Verify that access happens in similar types. */
321438fd1498Szrj if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
321538fd1498Szrj return 0;
321638fd1498Szrj /* Verify that accesses are TBAA compatible. */
321738fd1498Szrj if (!alias_ptr_types_compatible_p
321838fd1498Szrj (TREE_TYPE (TREE_OPERAND (arg0, 1)),
321938fd1498Szrj TREE_TYPE (TREE_OPERAND (arg1, 1)))
322038fd1498Szrj || (MR_DEPENDENCE_CLIQUE (arg0)
322138fd1498Szrj != MR_DEPENDENCE_CLIQUE (arg1))
322238fd1498Szrj || (MR_DEPENDENCE_BASE (arg0)
322338fd1498Szrj != MR_DEPENDENCE_BASE (arg1)))
322438fd1498Szrj return 0;
322538fd1498Szrj /* Verify that alignment is compatible. */
322638fd1498Szrj if (TYPE_ALIGN (TREE_TYPE (arg0))
322738fd1498Szrj != TYPE_ALIGN (TREE_TYPE (arg1)))
322838fd1498Szrj return 0;
322938fd1498Szrj }
323038fd1498Szrj flags &= ~OEP_ADDRESS_OF;
323138fd1498Szrj return (OP_SAME (0) && OP_SAME (1)
323238fd1498Szrj /* TARGET_MEM_REF require equal extra operands. */
323338fd1498Szrj && (TREE_CODE (arg0) != TARGET_MEM_REF
323438fd1498Szrj || (OP_SAME_WITH_NULL (2)
323538fd1498Szrj && OP_SAME_WITH_NULL (3)
323638fd1498Szrj && OP_SAME_WITH_NULL (4))));
323738fd1498Szrj
323838fd1498Szrj case ARRAY_REF:
323938fd1498Szrj case ARRAY_RANGE_REF:
324038fd1498Szrj if (!OP_SAME (0))
324138fd1498Szrj return 0;
324238fd1498Szrj flags &= ~OEP_ADDRESS_OF;
324338fd1498Szrj /* Compare the array index by value if it is constant first as we
324438fd1498Szrj may have different types but same value here. */
324538fd1498Szrj return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
324638fd1498Szrj TREE_OPERAND (arg1, 1))
324738fd1498Szrj || OP_SAME (1))
324838fd1498Szrj && OP_SAME_WITH_NULL (2)
324938fd1498Szrj && OP_SAME_WITH_NULL (3)
325038fd1498Szrj /* Compare low bound and element size as with OEP_ADDRESS_OF
325138fd1498Szrj we have to account for the offset of the ref. */
325238fd1498Szrj && (TREE_TYPE (TREE_OPERAND (arg0, 0))
325338fd1498Szrj == TREE_TYPE (TREE_OPERAND (arg1, 0))
325438fd1498Szrj || (operand_equal_p (array_ref_low_bound
325538fd1498Szrj (CONST_CAST_TREE (arg0)),
325638fd1498Szrj array_ref_low_bound
325738fd1498Szrj (CONST_CAST_TREE (arg1)), flags)
325838fd1498Szrj && operand_equal_p (array_ref_element_size
325938fd1498Szrj (CONST_CAST_TREE (arg0)),
326038fd1498Szrj array_ref_element_size
326138fd1498Szrj (CONST_CAST_TREE (arg1)),
326238fd1498Szrj flags))));
326338fd1498Szrj
326438fd1498Szrj case COMPONENT_REF:
326538fd1498Szrj /* Handle operand 2 the same as for ARRAY_REF. Operand 0
326638fd1498Szrj may be NULL when we're called to compare MEM_EXPRs. */
326738fd1498Szrj if (!OP_SAME_WITH_NULL (0)
326838fd1498Szrj || !OP_SAME (1))
326938fd1498Szrj return 0;
327038fd1498Szrj flags &= ~OEP_ADDRESS_OF;
327138fd1498Szrj return OP_SAME_WITH_NULL (2);
327238fd1498Szrj
327338fd1498Szrj case BIT_FIELD_REF:
327438fd1498Szrj if (!OP_SAME (0))
327538fd1498Szrj return 0;
327638fd1498Szrj flags &= ~OEP_ADDRESS_OF;
327738fd1498Szrj return OP_SAME (1) && OP_SAME (2);
327838fd1498Szrj
327938fd1498Szrj default:
328038fd1498Szrj return 0;
328138fd1498Szrj }
328238fd1498Szrj
328338fd1498Szrj case tcc_expression:
328438fd1498Szrj switch (TREE_CODE (arg0))
328538fd1498Szrj {
328638fd1498Szrj case ADDR_EXPR:
328738fd1498Szrj /* Be sure we pass right ADDRESS_OF flag. */
328838fd1498Szrj gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
328938fd1498Szrj return operand_equal_p (TREE_OPERAND (arg0, 0),
329038fd1498Szrj TREE_OPERAND (arg1, 0),
329138fd1498Szrj flags | OEP_ADDRESS_OF);
329238fd1498Szrj
329338fd1498Szrj case TRUTH_NOT_EXPR:
329438fd1498Szrj return OP_SAME (0);
329538fd1498Szrj
329638fd1498Szrj case TRUTH_ANDIF_EXPR:
329738fd1498Szrj case TRUTH_ORIF_EXPR:
329838fd1498Szrj return OP_SAME (0) && OP_SAME (1);
329938fd1498Szrj
330038fd1498Szrj case FMA_EXPR:
330138fd1498Szrj case WIDEN_MULT_PLUS_EXPR:
330238fd1498Szrj case WIDEN_MULT_MINUS_EXPR:
330338fd1498Szrj if (!OP_SAME (2))
330438fd1498Szrj return 0;
330538fd1498Szrj 	  /* The multiplication operands are commutative.  */
330638fd1498Szrj /* FALLTHRU */
330738fd1498Szrj
330838fd1498Szrj case TRUTH_AND_EXPR:
330938fd1498Szrj case TRUTH_OR_EXPR:
331038fd1498Szrj case TRUTH_XOR_EXPR:
331138fd1498Szrj if (OP_SAME (0) && OP_SAME (1))
331238fd1498Szrj return 1;
331338fd1498Szrj
331438fd1498Szrj /* Otherwise take into account this is a commutative operation. */
331538fd1498Szrj return (operand_equal_p (TREE_OPERAND (arg0, 0),
331638fd1498Szrj TREE_OPERAND (arg1, 1), flags)
331738fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 1),
331838fd1498Szrj TREE_OPERAND (arg1, 0), flags));
331938fd1498Szrj
332038fd1498Szrj case COND_EXPR:
332138fd1498Szrj if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
332238fd1498Szrj return 0;
332338fd1498Szrj flags &= ~OEP_ADDRESS_OF;
332438fd1498Szrj return OP_SAME (0);
332538fd1498Szrj
332638fd1498Szrj case BIT_INSERT_EXPR:
332738fd1498Szrj 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
332838fd1498Szrj of op1. Need to check to make sure they are the same. */
332938fd1498Szrj if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
333038fd1498Szrj && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
333138fd1498Szrj && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
333238fd1498Szrj != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
333338fd1498Szrj return false;
333438fd1498Szrj /* FALLTHRU */
333538fd1498Szrj
333638fd1498Szrj case VEC_COND_EXPR:
333738fd1498Szrj case DOT_PROD_EXPR:
333838fd1498Szrj return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
333938fd1498Szrj
334038fd1498Szrj case MODIFY_EXPR:
334138fd1498Szrj case INIT_EXPR:
334238fd1498Szrj case COMPOUND_EXPR:
334338fd1498Szrj case PREDECREMENT_EXPR:
334438fd1498Szrj case PREINCREMENT_EXPR:
334538fd1498Szrj case POSTDECREMENT_EXPR:
334638fd1498Szrj case POSTINCREMENT_EXPR:
334738fd1498Szrj if (flags & OEP_LEXICOGRAPHIC)
334838fd1498Szrj return OP_SAME (0) && OP_SAME (1);
334938fd1498Szrj return 0;
335038fd1498Szrj
335138fd1498Szrj case CLEANUP_POINT_EXPR:
335238fd1498Szrj case EXPR_STMT:
335338fd1498Szrj if (flags & OEP_LEXICOGRAPHIC)
335438fd1498Szrj return OP_SAME (0);
335538fd1498Szrj return 0;
335638fd1498Szrj
335738fd1498Szrj default:
335838fd1498Szrj return 0;
335938fd1498Szrj }
336038fd1498Szrj
336138fd1498Szrj case tcc_vl_exp:
336238fd1498Szrj switch (TREE_CODE (arg0))
336338fd1498Szrj {
336438fd1498Szrj case CALL_EXPR:
336538fd1498Szrj if ((CALL_EXPR_FN (arg0) == NULL_TREE)
336638fd1498Szrj != (CALL_EXPR_FN (arg1) == NULL_TREE))
336738fd1498Szrj 	    /* If one CALL_EXPR is an internal call and the other is a
336838fd1498Szrj 	       normal function call, then they are not equal.  */
336938fd1498Szrj return 0;
337038fd1498Szrj else if (CALL_EXPR_FN (arg0) == NULL_TREE)
337138fd1498Szrj {
337238fd1498Szrj /* If the CALL_EXPRs call different internal functions, then they
337338fd1498Szrj are not equal. */
337438fd1498Szrj if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
337538fd1498Szrj return 0;
337638fd1498Szrj }
337738fd1498Szrj else
337838fd1498Szrj {
337938fd1498Szrj /* If the CALL_EXPRs call different functions, then they are not
338038fd1498Szrj equal. */
338138fd1498Szrj if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
338238fd1498Szrj flags))
338338fd1498Szrj return 0;
338438fd1498Szrj }
338538fd1498Szrj
338638fd1498Szrj /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
338738fd1498Szrj {
338838fd1498Szrj unsigned int cef = call_expr_flags (arg0);
338938fd1498Szrj if (flags & OEP_PURE_SAME)
339038fd1498Szrj cef &= ECF_CONST | ECF_PURE;
339138fd1498Szrj else
339238fd1498Szrj cef &= ECF_CONST;
339338fd1498Szrj if (!cef && !(flags & OEP_LEXICOGRAPHIC))
339438fd1498Szrj return 0;
339538fd1498Szrj }
339638fd1498Szrj
339738fd1498Szrj /* Now see if all the arguments are the same. */
339838fd1498Szrj {
339938fd1498Szrj const_call_expr_arg_iterator iter0, iter1;
340038fd1498Szrj const_tree a0, a1;
340138fd1498Szrj for (a0 = first_const_call_expr_arg (arg0, &iter0),
340238fd1498Szrj a1 = first_const_call_expr_arg (arg1, &iter1);
340338fd1498Szrj a0 && a1;
340438fd1498Szrj a0 = next_const_call_expr_arg (&iter0),
340538fd1498Szrj a1 = next_const_call_expr_arg (&iter1))
340638fd1498Szrj if (! operand_equal_p (a0, a1, flags))
340738fd1498Szrj return 0;
340838fd1498Szrj
340938fd1498Szrj /* If we get here and both argument lists are exhausted
341038fd1498Szrj then the CALL_EXPRs are equal. */
341138fd1498Szrj return ! (a0 || a1);
341238fd1498Szrj }
341338fd1498Szrj default:
341438fd1498Szrj return 0;
341538fd1498Szrj }
341638fd1498Szrj
341738fd1498Szrj case tcc_declaration:
341838fd1498Szrj /* Consider __builtin_sqrt equal to sqrt. */
341938fd1498Szrj return (TREE_CODE (arg0) == FUNCTION_DECL
342038fd1498Szrj && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
342138fd1498Szrj && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
342238fd1498Szrj && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
342338fd1498Szrj
342438fd1498Szrj case tcc_exceptional:
342538fd1498Szrj if (TREE_CODE (arg0) == CONSTRUCTOR)
342638fd1498Szrj {
342738fd1498Szrj /* In GIMPLE constructors are used only to build vectors from
342838fd1498Szrj elements. Individual elements in the constructor must be
342938fd1498Szrj indexed in increasing order and form an initial sequence.
343038fd1498Szrj
343138fd1498Szrj We make no effort to compare constructors in generic.
343238fd1498Szrj (see sem_variable::equals in ipa-icf which can do so for
343338fd1498Szrj constants). */
343438fd1498Szrj if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
343538fd1498Szrj || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
343638fd1498Szrj return 0;
343738fd1498Szrj
343838fd1498Szrj /* Be sure that vectors constructed have the same representation.
343938fd1498Szrj We only tested element precision and modes to match.
344038fd1498Szrj Vectors may be BLKmode and thus also check that the number of
344138fd1498Szrj parts match. */
344238fd1498Szrj if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
344338fd1498Szrj TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
344438fd1498Szrj return 0;
344538fd1498Szrj
344638fd1498Szrj vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
344738fd1498Szrj vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
344838fd1498Szrj unsigned int len = vec_safe_length (v0);
344938fd1498Szrj
345038fd1498Szrj if (len != vec_safe_length (v1))
345138fd1498Szrj return 0;
345238fd1498Szrj
345338fd1498Szrj for (unsigned int i = 0; i < len; i++)
345438fd1498Szrj {
345538fd1498Szrj constructor_elt *c0 = &(*v0)[i];
345638fd1498Szrj constructor_elt *c1 = &(*v1)[i];
345738fd1498Szrj
345838fd1498Szrj if (!operand_equal_p (c0->value, c1->value, flags)
345938fd1498Szrj /* In GIMPLE the indexes can be either NULL or matching i.
346038fd1498Szrj Double check this so we won't get false
346138fd1498Szrj positives for GENERIC. */
346238fd1498Szrj || (c0->index
346338fd1498Szrj && (TREE_CODE (c0->index) != INTEGER_CST
346438fd1498Szrj || !compare_tree_int (c0->index, i)))
346538fd1498Szrj || (c1->index
346638fd1498Szrj && (TREE_CODE (c1->index) != INTEGER_CST
346738fd1498Szrj || !compare_tree_int (c1->index, i))))
346838fd1498Szrj return 0;
346938fd1498Szrj }
347038fd1498Szrj return 1;
347138fd1498Szrj }
347238fd1498Szrj else if (TREE_CODE (arg0) == STATEMENT_LIST
347338fd1498Szrj && (flags & OEP_LEXICOGRAPHIC))
347438fd1498Szrj {
347538fd1498Szrj /* Compare the STATEMENT_LISTs. */
347638fd1498Szrj tree_stmt_iterator tsi1, tsi2;
347738fd1498Szrj tree body1 = CONST_CAST_TREE (arg0);
347838fd1498Szrj tree body2 = CONST_CAST_TREE (arg1);
347938fd1498Szrj for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
348038fd1498Szrj tsi_next (&tsi1), tsi_next (&tsi2))
348138fd1498Szrj {
348238fd1498Szrj /* The lists don't have the same number of statements. */
348338fd1498Szrj if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
348438fd1498Szrj return 0;
348538fd1498Szrj if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
348638fd1498Szrj return 1;
348738fd1498Szrj if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
348838fd1498Szrj flags & (OEP_LEXICOGRAPHIC
348938fd1498Szrj | OEP_NO_HASH_CHECK)))
349038fd1498Szrj return 0;
349138fd1498Szrj }
349238fd1498Szrj }
349338fd1498Szrj return 0;
349438fd1498Szrj
349538fd1498Szrj case tcc_statement:
349638fd1498Szrj switch (TREE_CODE (arg0))
349738fd1498Szrj {
349838fd1498Szrj case RETURN_EXPR:
349938fd1498Szrj if (flags & OEP_LEXICOGRAPHIC)
350038fd1498Szrj return OP_SAME_WITH_NULL (0);
350138fd1498Szrj return 0;
350238fd1498Szrj case DEBUG_BEGIN_STMT:
350338fd1498Szrj if (flags & OEP_LEXICOGRAPHIC)
350438fd1498Szrj return 1;
350538fd1498Szrj return 0;
350638fd1498Szrj default:
350738fd1498Szrj return 0;
350838fd1498Szrj }
350938fd1498Szrj
351038fd1498Szrj default:
351138fd1498Szrj return 0;
351238fd1498Szrj }
351338fd1498Szrj
351438fd1498Szrj #undef OP_SAME
351538fd1498Szrj #undef OP_SAME_WITH_NULL
351638fd1498Szrj }
351738fd1498Szrj
351838fd1498Szrj /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
351938fd1498Szrj with a different signedness or a narrower precision. */
352038fd1498Szrj
352138fd1498Szrj static bool
operand_equal_for_comparison_p(tree arg0,tree arg1)352238fd1498Szrj operand_equal_for_comparison_p (tree arg0, tree arg1)
352338fd1498Szrj {
352438fd1498Szrj if (operand_equal_p (arg0, arg1, 0))
352538fd1498Szrj return true;
352638fd1498Szrj
352738fd1498Szrj if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
352838fd1498Szrj || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
352938fd1498Szrj return false;
353038fd1498Szrj
353138fd1498Szrj /* Discard any conversions that don't change the modes of ARG0 and ARG1
353238fd1498Szrj and see if the inner values are the same. This removes any
353338fd1498Szrj signedness comparison, which doesn't matter here. */
353438fd1498Szrj tree op0 = arg0;
353538fd1498Szrj tree op1 = arg1;
353638fd1498Szrj STRIP_NOPS (op0);
353738fd1498Szrj STRIP_NOPS (op1);
353838fd1498Szrj if (operand_equal_p (op0, op1, 0))
353938fd1498Szrj return true;
354038fd1498Szrj
354138fd1498Szrj /* Discard a single widening conversion from ARG1 and see if the inner
354238fd1498Szrj value is the same as ARG0. */
354338fd1498Szrj if (CONVERT_EXPR_P (arg1)
354438fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
354538fd1498Szrj && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
354638fd1498Szrj < TYPE_PRECISION (TREE_TYPE (arg1))
354738fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
354838fd1498Szrj return true;
354938fd1498Szrj
355038fd1498Szrj return false;
355138fd1498Szrj }
355238fd1498Szrj
355338fd1498Szrj /* See if ARG is an expression that is either a comparison or is performing
355438fd1498Szrj arithmetic on comparisons. The comparisons must only be comparing
355538fd1498Szrj two different values, which will be stored in *CVAL1 and *CVAL2; if
355638fd1498Szrj they are nonzero it means that some operands have already been found.
355738fd1498Szrj No variables may be used anywhere else in the expression except in the
355838fd1498Szrj comparisons.
355938fd1498Szrj
356038fd1498Szrj If this is true, return 1. Otherwise, return zero. */
356138fd1498Szrj
356238fd1498Szrj static int
twoval_comparison_p(tree arg,tree * cval1,tree * cval2)356338fd1498Szrj twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
356438fd1498Szrj {
356538fd1498Szrj enum tree_code code = TREE_CODE (arg);
356638fd1498Szrj enum tree_code_class tclass = TREE_CODE_CLASS (code);
356738fd1498Szrj
356838fd1498Szrj /* We can handle some of the tcc_expression cases here. */
356938fd1498Szrj if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
357038fd1498Szrj tclass = tcc_unary;
357138fd1498Szrj else if (tclass == tcc_expression
357238fd1498Szrj && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
357338fd1498Szrj || code == COMPOUND_EXPR))
357438fd1498Szrj tclass = tcc_binary;
357538fd1498Szrj
357638fd1498Szrj switch (tclass)
357738fd1498Szrj {
357838fd1498Szrj case tcc_unary:
357938fd1498Szrj return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
358038fd1498Szrj
358138fd1498Szrj case tcc_binary:
358238fd1498Szrj return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
358338fd1498Szrj && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
358438fd1498Szrj
358538fd1498Szrj case tcc_constant:
358638fd1498Szrj return 1;
358738fd1498Szrj
358838fd1498Szrj case tcc_expression:
358938fd1498Szrj if (code == COND_EXPR)
359038fd1498Szrj return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
359138fd1498Szrj && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
359238fd1498Szrj && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
359338fd1498Szrj return 0;
359438fd1498Szrj
359538fd1498Szrj case tcc_comparison:
359638fd1498Szrj /* First see if we can handle the first operand, then the second. For
359738fd1498Szrj the second operand, we know *CVAL1 can't be zero. It must be that
359838fd1498Szrj one side of the comparison is each of the values; test for the
359938fd1498Szrj case where this isn't true by failing if the two operands
360038fd1498Szrj are the same. */
360138fd1498Szrj
360238fd1498Szrj if (operand_equal_p (TREE_OPERAND (arg, 0),
360338fd1498Szrj TREE_OPERAND (arg, 1), 0))
360438fd1498Szrj return 0;
360538fd1498Szrj
360638fd1498Szrj if (*cval1 == 0)
360738fd1498Szrj *cval1 = TREE_OPERAND (arg, 0);
360838fd1498Szrj else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
360938fd1498Szrj ;
361038fd1498Szrj else if (*cval2 == 0)
361138fd1498Szrj *cval2 = TREE_OPERAND (arg, 0);
361238fd1498Szrj else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
361338fd1498Szrj ;
361438fd1498Szrj else
361538fd1498Szrj return 0;
361638fd1498Szrj
361738fd1498Szrj if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
361838fd1498Szrj ;
361938fd1498Szrj else if (*cval2 == 0)
362038fd1498Szrj *cval2 = TREE_OPERAND (arg, 1);
362138fd1498Szrj else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
362238fd1498Szrj ;
362338fd1498Szrj else
362438fd1498Szrj return 0;
362538fd1498Szrj
362638fd1498Szrj return 1;
362738fd1498Szrj
362838fd1498Szrj default:
362938fd1498Szrj return 0;
363038fd1498Szrj }
363138fd1498Szrj }
363238fd1498Szrj
363338fd1498Szrj /* ARG is a tree that is known to contain just arithmetic operations and
363438fd1498Szrj comparisons. Evaluate the operations in the tree substituting NEW0 for
363538fd1498Szrj any occurrence of OLD0 as an operand of a comparison and likewise for
363638fd1498Szrj NEW1 and OLD1. */
363738fd1498Szrj
363838fd1498Szrj static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  TRUTH_NOT is
     shaped like a unary op and TRUTH_ANDIF/ORIF like binary ops, so
     reclassify them and let the generic recursion below handle them.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      /* Rebuild the node with the substitution applied to its operand.  */
      return fold_build1_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      /* Substitute recursively into both operands.  */
      return fold_build2_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (loc, TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  /* Substitute into the saved expression itself.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  /* Only the second operand yields the value, so substitute
	     there; operand 0 is dropped here.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  /* Substitute into condition and both arms.  */
	  return fold_build3_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???
	 NOTE: deliberate fall-through into the comparison case for the
	 remaining tcc_expression codes, which are treated as if they
	 were two-operand comparisons.  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
371738fd1498Szrj
371838fd1498Szrj /* Return a tree for the case when the result of an expression is RESULT
371938fd1498Szrj converted to TYPE and OMITTED was previously an operand of the expression
372038fd1498Szrj but is now not needed (e.g., we folded OMITTED * 0).
372138fd1498Szrj
372238fd1498Szrj If OMITTED has side effects, we must evaluate it. Otherwise, just do
372338fd1498Szrj the conversion of RESULT to TYPE. */
372438fd1498Szrj
372538fd1498Szrj tree
omit_one_operand_loc(location_t loc,tree type,tree result,tree omitted)372638fd1498Szrj omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
372738fd1498Szrj {
372838fd1498Szrj tree t = fold_convert_loc (loc, type, result);
372938fd1498Szrj
373038fd1498Szrj /* If the resulting operand is an empty statement, just return the omitted
373138fd1498Szrj statement casted to void. */
373238fd1498Szrj if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
373338fd1498Szrj return build1_loc (loc, NOP_EXPR, void_type_node,
373438fd1498Szrj fold_ignored_result (omitted));
373538fd1498Szrj
373638fd1498Szrj if (TREE_SIDE_EFFECTS (omitted))
373738fd1498Szrj return build2_loc (loc, COMPOUND_EXPR, type,
373838fd1498Szrj fold_ignored_result (omitted), t);
373938fd1498Szrj
374038fd1498Szrj return non_lvalue_loc (loc, t);
374138fd1498Szrj }
374238fd1498Szrj
374338fd1498Szrj /* Return a tree for the case when the result of an expression is RESULT
374438fd1498Szrj converted to TYPE and OMITTED1 and OMITTED2 were previously operands
374538fd1498Szrj of the expression but are now not needed.
374638fd1498Szrj
374738fd1498Szrj If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
374838fd1498Szrj If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
374938fd1498Szrj evaluated before OMITTED2. Otherwise, if neither has side effects,
375038fd1498Szrj just do the conversion of RESULT to TYPE. */
375138fd1498Szrj
375238fd1498Szrj tree
omit_two_operands_loc(location_t loc,tree type,tree result,tree omitted1,tree omitted2)375338fd1498Szrj omit_two_operands_loc (location_t loc, tree type, tree result,
375438fd1498Szrj tree omitted1, tree omitted2)
375538fd1498Szrj {
375638fd1498Szrj tree t = fold_convert_loc (loc, type, result);
375738fd1498Szrj
375838fd1498Szrj if (TREE_SIDE_EFFECTS (omitted2))
375938fd1498Szrj t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
376038fd1498Szrj if (TREE_SIDE_EFFECTS (omitted1))
376138fd1498Szrj t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
376238fd1498Szrj
376338fd1498Szrj return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
376438fd1498Szrj }
376538fd1498Szrj
376638fd1498Szrj
376738fd1498Szrj /* Return a simplified tree node for the truth-negation of ARG. This
376838fd1498Szrj never alters ARG itself. We assume that ARG is an operation that
376938fd1498Szrj returns a truth value (0 or 1).
377038fd1498Szrj
377138fd1498Szrj FIXME: one would think we would fold the result, but it causes
377238fd1498Szrj problems with the dominator optimizer. */
377338fd1498Szrj
static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an ordered FP comparison into an
	 unordered one (or vice versa) could change which operands trap,
	 so give up for everything except (UN)ORDERED, NE and EQ.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      /* ERROR_MARK means the comparison has no valid inversion.  */
      if (code == ERROR_MARK)
	return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			     TREE_OPERAND (arg, 1));
      /* Propagate the no-warning flag so the rebuilt comparison does not
	 re-trigger diagnostics suppressed on the original.  */
      if (TREE_NO_WARNING (arg))
	TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      /* !constant is just the opposite boolean constant.  */
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) -> !a | !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) -> !a & !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan on the short-circuit forms: !(a && b) -> !a || !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      /* !(a || b) -> !a && !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      /* Only the second operand carries the value; invert just that.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      /* The wrapper is irrelevant to the truth value; invert beneath it.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      /* For a conversion from boolean we cannot push the negation
	 inside; wrap the whole conversion in a TRUTH_NOT_EXPR.  */
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* fall through */

    case FLOAT_EXPR:
      /* Otherwise push the negation through the conversion.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only handle X & 1, whose negation is X == 0.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      /* Keep the SAVE_EXPR intact; just wrap it in a negation.  */
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      /* Invert inside, keeping the cleanup point in place.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      /* Nothing known about this node: caller must fall back.  */
      return NULL_TREE;
    }
}
391138fd1498Szrj
391238fd1498Szrj /* Fold the truth-negation of ARG. This never alters ARG itself. We
391338fd1498Szrj assume that ARG is an operation that returns a truth value (0 or 1
391438fd1498Szrj for scalars, 0 or -1 for vectors). Return the folded expression if
391538fd1498Szrj folding is successful. Otherwise, return NULL_TREE. */
391638fd1498Szrj
391738fd1498Szrj static tree
fold_invert_truthvalue(location_t loc,tree arg)391838fd1498Szrj fold_invert_truthvalue (location_t loc, tree arg)
391938fd1498Szrj {
392038fd1498Szrj tree type = TREE_TYPE (arg);
392138fd1498Szrj return fold_unary_loc (loc, VECTOR_TYPE_P (type)
392238fd1498Szrj ? BIT_NOT_EXPR
392338fd1498Szrj : TRUTH_NOT_EXPR,
392438fd1498Szrj type, arg);
392538fd1498Szrj }
392638fd1498Szrj
392738fd1498Szrj /* Return a simplified tree node for the truth-negation of ARG. This
392838fd1498Szrj never alters ARG itself. We assume that ARG is an operation that
392938fd1498Szrj returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
393038fd1498Szrj
393138fd1498Szrj tree
invert_truthvalue_loc(location_t loc,tree arg)393238fd1498Szrj invert_truthvalue_loc (location_t loc, tree arg)
393338fd1498Szrj {
393438fd1498Szrj if (TREE_CODE (arg) == ERROR_MARK)
393538fd1498Szrj return arg;
393638fd1498Szrj
393738fd1498Szrj tree type = TREE_TYPE (arg);
393838fd1498Szrj return fold_build1_loc (loc, VECTOR_TYPE_P (type)
393938fd1498Szrj ? BIT_NOT_EXPR
394038fd1498Szrj : TRUTH_NOT_EXPR,
394138fd1498Szrj type, arg);
394238fd1498Szrj }
394338fd1498Szrj
394438fd1498Szrj /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
394538fd1498Szrj starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
394638fd1498Szrj and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
394738fd1498Szrj is the original memory reference used to preserve the alias set of
394838fd1498Szrj the access. */
394938fd1498Szrj
static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  /* Attempt not to lose the access path if possible.  If ORIG_INNER is a
     COMPONENT_REF whose base resolves to INNER and fully covers the
     requested bits, reference through that component instead and rebase
     BITPOS relative to it.  */
  if (TREE_CODE (orig_inner) == COMPONENT_REF)
    {
      tree ninner = TREE_OPERAND (orig_inner, 0);
      machine_mode nmode;
      poly_int64 nbitsize, nbitpos;
      tree noffset;
      int nunsignedp, nreversep, nvolatilep = 0;
      tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
				       &noffset, &nmode, &nunsignedp,
				       &nreversep, &nvolatilep);
      if (base == inner
	  && noffset == NULL_TREE
	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
	  && !reversep
	  && !nreversep
	  && !nvolatilep)
	{
	  inner = ninner;
	  bitpos -= nbitpos;
	}
    }

  /* If the original reference has alias set zero but INNER does not,
     force alias set zero by accessing INNER through a MEM_REF with a
     zero-alias-set pointer type, preserving the original's aliasing.  */
  alias_set_type iset = get_alias_set (orig_inner);
  if (iset == 0 && get_alias_set (inner) != iset)
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
			 build_fold_addr_expr (inner),
			 build_int_cst (ptr_type_node, 0));

  /* A full-width access to an integral or pointer object at offset 0 in
     native storage order is just a conversion of the whole object.  */
  if (known_eq (bitpos, 0) && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  /* Use TYPE for the BIT_FIELD_REF only if its precision and signedness
     match; otherwise build an integer type of exactly BITSIZE bits with
     the requested signedness and convert afterwards.  */
  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       bitsize_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
401038fd1498Szrj
401138fd1498Szrj /* Optimize a bit-field compare.
401238fd1498Szrj
401338fd1498Szrj There are two cases: First is a compare against a constant and the
401438fd1498Szrj second is a comparison of two items where the fields are at the same
401538fd1498Szrj bit position relative to the start of a chunk (byte, halfword, word)
401638fd1498Szrj large enough to contain it. In these cases we can avoid the shift
401738fd1498Szrj implicit in bitfield extractions.
401838fd1498Szrj
401938fd1498Szrj For constants, we emit a compare of the shifted constant with the
402038fd1498Szrj BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
402138fd1498Szrj compared. For two fields at the same position, we do the ANDs with the
402238fd1498Szrj similar mask and compare the result of the ANDs.
402338fd1498Szrj
402438fd1498Szrj CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
402538fd1498Szrj COMPARE_TYPE is the type of the comparison, and LHS and RHS
402638fd1498Szrj are the left and right operands of the comparison, respectively.
402738fd1498Szrj
402838fd1498Szrj If the optimization described above can be done, we return the resulting
402938fd1498Szrj tree. Otherwise we return zero. */
403038fd1498Szrj
static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
  HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode;
  scalar_int_mode nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep);
  /* Also bail for variable position/size, a variable offset, or a
     volatile access.  */
  if (linner == lhs
      || !known_size_p (plbitsize)
      || !plbitsize.is_constant (&lbitsize)
      || !plbitpos.is_constant (&lbitpos)
      || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
      || offset != 0
      || TREE_CODE (linner) == PLACEHOLDER_EXPR
      || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep);

      if (rinner == rhs
	  || maybe_ne (lbitpos, rbitpos)
	  || maybe_ne (lbitsize, rbitsize)
	  || lunsignedp != runsignedp
	  || lreversep != rreversep
	  || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR
	  || rvolatilep)
	return 0;
    }

  /* Honor the C++ memory model and mimic what RTL expansion does.  */
  poly_uint64 bitstart = 0;
  poly_uint64 bitend = 0;
  if (TREE_CODE (lhs) == COMPONENT_REF)
    {
      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
			     TYPE_ALIGN (TREE_TYPE (rinner))),
		      BITS_PER_WORD, false, &nmode))
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  /* Round the field's position down to a mode-aligned boundary; LBITPOS
     becomes the offset of the field inside that mode-sized chunk.  */
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* Convert to a bit offset counted from the least significant end,
     accounting for storage order.  */
  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE ones
     positioned LBITPOS bits up, built by shifting an all-ones value.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    {
      if (nbitpos < 0)
	return 0;

      /* If not comparing with constant, just rework the comparison
	 and return.  Both sides become (chunk & mask) in the new mode.  */
      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
				    nbitsize, nbitpos, 1, lreversep);
      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
				    nbitsize, nbitpos, 1, rreversep);
      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
      return fold_build2_loc (loc, code, compare_type, t1, t2);
    }

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  if (nbitpos < 0)
    return 0;

  /* Single-bit compares should always be against zero.  For a nonzero
     constant, flip EQ/NE and compare against zero instead.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
			    nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  /* Final form: (chunk & mask) CODE (constant << lbitpos & mask).  */
  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
419938fd1498Szrj
420038fd1498Szrj /* Subroutine for fold_truth_andor_1: decode a field reference.
420138fd1498Szrj
420238fd1498Szrj If EXP is a comparison reference, we return the innermost reference.
420338fd1498Szrj
420438fd1498Szrj *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
420538fd1498Szrj set to the starting bit number.
420638fd1498Szrj
420738fd1498Szrj If the innermost field can be completely contained in a mode-sized
420838fd1498Szrj unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
420938fd1498Szrj
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.
421238fd1498Szrj
421338fd1498Szrj *PUNSIGNEDP is set to the signedness of the field.
421438fd1498Szrj
421538fd1498Szrj *PREVERSEP is set to the storage order of the field.
421638fd1498Szrj
421738fd1498Szrj *PMASK is set to the mask used. This is either contained in a
421838fd1498Szrj BIT_AND_EXPR or derived from the width of the field.
421938fd1498Szrj
422038fd1498Szrj *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
422138fd1498Szrj
422238fd1498Szrj Return 0 if this is not a component reference or is one that we can't
422338fd1498Szrj do anything with. */
422438fd1498Szrj
static tree
decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
			int *punsignedp, int *preversep, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree exp = *exp_;
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit BIT_AND_EXPR and remember its constant mask so
     it can be folded into the field mask below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  poly_int64 poly_bitsize, poly_bitpos;
  inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
			       pmode, punsignedp, preversep, pvolatilep);
  /* Give up if there is no real field access (unless a mask made the
     expression interesting), the geometry is not constant, there is a
     variable offset, or the inner object cannot be substituted.  */
  if ((inner == exp && and_mask == 0)
      || !poly_bitsize.is_constant (pbitsize)
      || !poly_bitpos.is_constant (pbitpos)
      || *pbitsize < 0
      || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR
      /* Reject out-of-bound accesses (PR79731).  */
      || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
			       *pbitpos + *pbitsize) < 0))
    return 0;

  /* Hand the stripped expression back to the caller.  */
  *exp_ = exp;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: *PBITSIZE low-order ones,
     made by shifting an all-ones value up and back down.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
430238fd1498Szrj
430338fd1498Szrj /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
430438fd1498Szrj bit positions and MASK is SIGNED. */
430538fd1498Szrj
430638fd1498Szrj static int
all_ones_mask_p(const_tree mask,unsigned int size)430738fd1498Szrj all_ones_mask_p (const_tree mask, unsigned int size)
430838fd1498Szrj {
430938fd1498Szrj tree type = TREE_TYPE (mask);
431038fd1498Szrj unsigned int precision = TYPE_PRECISION (type);
431138fd1498Szrj
431238fd1498Szrj /* If this function returns true when the type of the mask is
431338fd1498Szrj UNSIGNED, then there will be errors. In particular see
431438fd1498Szrj gcc.c-torture/execute/990326-1.c. There does not appear to be
431538fd1498Szrj any documentation paper trail as to why this is so. But the pre
431638fd1498Szrj wide-int worked with that restriction and it has been preserved
431738fd1498Szrj here. */
431838fd1498Szrj if (size > precision || TYPE_SIGN (type) == UNSIGNED)
431938fd1498Szrj return false;
432038fd1498Szrj
432138fd1498Szrj return wi::mask (size, false, precision) == wi::to_wide (mask);
432238fd1498Szrj }
432338fd1498Szrj
432438fd1498Szrj /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
432538fd1498Szrj represents the sign bit of EXP's type. If EXP represents a sign
432638fd1498Szrj or zero extension, also test VAL against the unextended type.
432738fd1498Szrj The return value is the (sub)expression whose sign bit is VAL,
432838fd1498Szrj or NULL_TREE otherwise. */
432938fd1498Szrj
433038fd1498Szrj tree
sign_bit_p(tree exp,const_tree val)433138fd1498Szrj sign_bit_p (tree exp, const_tree val)
433238fd1498Szrj {
433338fd1498Szrj int width;
433438fd1498Szrj tree t;
433538fd1498Szrj
433638fd1498Szrj /* Tree EXP must have an integral type. */
433738fd1498Szrj t = TREE_TYPE (exp);
433838fd1498Szrj if (! INTEGRAL_TYPE_P (t))
433938fd1498Szrj return NULL_TREE;
434038fd1498Szrj
434138fd1498Szrj /* Tree VAL must be an integer constant. */
434238fd1498Szrj if (TREE_CODE (val) != INTEGER_CST
434338fd1498Szrj || TREE_OVERFLOW (val))
434438fd1498Szrj return NULL_TREE;
434538fd1498Szrj
434638fd1498Szrj width = TYPE_PRECISION (t);
434738fd1498Szrj if (wi::only_sign_bit_p (wi::to_wide (val), width))
434838fd1498Szrj return exp;
434938fd1498Szrj
435038fd1498Szrj /* Handle extension from a narrower type. */
435138fd1498Szrj if (TREE_CODE (exp) == NOP_EXPR
435238fd1498Szrj && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
435338fd1498Szrj return sign_bit_p (TREE_OPERAND (exp, 0), val);
435438fd1498Szrj
435538fd1498Szrj return NULL_TREE;
435638fd1498Szrj }
435738fd1498Szrj
435838fd1498Szrj /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
435938fd1498Szrj to be evaluated unconditionally. */
436038fd1498Szrj
436138fd1498Szrj static int
simple_operand_p(const_tree exp)436238fd1498Szrj simple_operand_p (const_tree exp)
436338fd1498Szrj {
436438fd1498Szrj /* Strip any conversions that don't change the machine mode. */
436538fd1498Szrj STRIP_NOPS (exp);
436638fd1498Szrj
436738fd1498Szrj return (CONSTANT_CLASS_P (exp)
436838fd1498Szrj || TREE_CODE (exp) == SSA_NAME
436938fd1498Szrj || (DECL_P (exp)
437038fd1498Szrj && ! TREE_ADDRESSABLE (exp)
437138fd1498Szrj && ! TREE_THIS_VOLATILE (exp)
437238fd1498Szrj && ! DECL_NONLOCAL (exp)
437338fd1498Szrj /* Don't regard global variables as simple. They may be
437438fd1498Szrj allocated in ways unknown to the compiler (shared memory,
437538fd1498Szrj #pragma weak, etc). */
437638fd1498Szrj && ! TREE_PUBLIC (exp)
437738fd1498Szrj && ! DECL_EXTERNAL (exp)
437838fd1498Szrj /* Weakrefs are not safe to be read, since they can be NULL.
437938fd1498Szrj They are !TREE_PUBLIC && !DECL_EXTERNAL but still
438038fd1498Szrj have DECL_WEAK flag set. */
438138fd1498Szrj && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
438238fd1498Szrj /* Loading a static variable is unduly expensive, but global
438338fd1498Szrj registers aren't expensive. */
438438fd1498Szrj && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
438538fd1498Szrj }
438638fd1498Szrj
438738fd1498Szrj /* Subroutine for fold_truth_andor: determine if an operand is simple enough
438838fd1498Szrj to be evaluated unconditionally.
438938fd1498Szrj I addition to simple_operand_p, we assume that comparisons, conversions,
439038fd1498Szrj and logic-not operations are simple, if their operands are simple, too. */
439138fd1498Szrj
439238fd1498Szrj static bool
simple_operand_p_2(tree exp)439338fd1498Szrj simple_operand_p_2 (tree exp)
439438fd1498Szrj {
439538fd1498Szrj enum tree_code code;
439638fd1498Szrj
439738fd1498Szrj if (TREE_SIDE_EFFECTS (exp)
439838fd1498Szrj || tree_could_trap_p (exp))
439938fd1498Szrj return false;
440038fd1498Szrj
440138fd1498Szrj while (CONVERT_EXPR_P (exp))
440238fd1498Szrj exp = TREE_OPERAND (exp, 0);
440338fd1498Szrj
440438fd1498Szrj code = TREE_CODE (exp);
440538fd1498Szrj
440638fd1498Szrj if (TREE_CODE_CLASS (code) == tcc_comparison)
440738fd1498Szrj return (simple_operand_p (TREE_OPERAND (exp, 0))
440838fd1498Szrj && simple_operand_p (TREE_OPERAND (exp, 1)));
440938fd1498Szrj
441038fd1498Szrj if (code == TRUTH_NOT_EXPR)
441138fd1498Szrj return simple_operand_p_2 (TREE_OPERAND (exp, 0));
441238fd1498Szrj
441338fd1498Szrj return simple_operand_p (exp);
441438fd1498Szrj }
441538fd1498Szrj
441638fd1498Szrj
441738fd1498Szrj /* The following functions are subroutines to fold_range_test and allow it to
441838fd1498Szrj try to change a logical combination of comparisons into a range test.
441938fd1498Szrj
442038fd1498Szrj For example, both
442138fd1498Szrj X == 2 || X == 3 || X == 4 || X == 5
442238fd1498Szrj and
442338fd1498Szrj X >= 2 && X <= 5
442438fd1498Szrj are converted to
442538fd1498Szrj (unsigned) (X - 2) <= 3
442638fd1498Szrj
442738fd1498Szrj We describe each set of comparisons as being either inside or outside
442838fd1498Szrj a range, using a variable named like IN_P, and then describe the
442938fd1498Szrj range with a lower and upper bound. If one of the bounds is omitted,
443038fd1498Szrj it represents either the highest or lowest value of the type.
443138fd1498Szrj
443238fd1498Szrj In the comments below, we represent a range by two numbers in brackets
443338fd1498Szrj preceded by a "+" to designate being inside that range, or a "-" to
443438fd1498Szrj designate being outside that range, so the condition can be inverted by
443538fd1498Szrj flipping the prefix. An omitted bound is represented by a "-". For
443638fd1498Szrj example, "- [-, 10]" means being outside the range starting at the lowest
443738fd1498Szrj possible value and ending at 10, in other words, being greater than 10.
443838fd1498Szrj The range "+ [-, -]" is always true and hence the range "- [-, -]" is
443938fd1498Szrj always false.
444038fd1498Szrj
444138fd1498Szrj We set up things so that the missing bounds are handled in a consistent
444238fd1498Szrj manner so neither a missing bound nor "true" and "false" need to be
444338fd1498Szrj handled using a special case. */
444438fd1498Szrj
444538fd1498Szrj /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
444638fd1498Szrj of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
444738fd1498Szrj and UPPER1_P are nonzero if the respective argument is an upper bound
444838fd1498Szrj and zero for a lower. TYPE, if nonzero, is the type of the result; it
444938fd1498Szrj must be specified for a comparison. ARG1 will be converted to ARG0's
445038fd1498Szrj type if both are specified. */
445138fd1498Szrj
445238fd1498Szrj static tree
range_binop(enum tree_code code,tree type,tree arg0,int upper0_p,tree arg1,int upper1_p)445338fd1498Szrj range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
445438fd1498Szrj tree arg1, int upper1_p)
445538fd1498Szrj {
445638fd1498Szrj tree tem;
445738fd1498Szrj int result;
445838fd1498Szrj int sgn0, sgn1;
445938fd1498Szrj
446038fd1498Szrj /* If neither arg represents infinity, do the normal operation.
446138fd1498Szrj Else, if not a comparison, return infinity. Else handle the special
446238fd1498Szrj comparison rules. Note that most of the cases below won't occur, but
446338fd1498Szrj are handled for consistency. */
446438fd1498Szrj
446538fd1498Szrj if (arg0 != 0 && arg1 != 0)
446638fd1498Szrj {
446738fd1498Szrj tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
446838fd1498Szrj arg0, fold_convert (TREE_TYPE (arg0), arg1));
446938fd1498Szrj STRIP_NOPS (tem);
447038fd1498Szrj return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
447138fd1498Szrj }
447238fd1498Szrj
447338fd1498Szrj if (TREE_CODE_CLASS (code) != tcc_comparison)
447438fd1498Szrj return 0;
447538fd1498Szrj
447638fd1498Szrj /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
447738fd1498Szrj for neither. In real maths, we cannot assume open ended ranges are
447838fd1498Szrj the same. But, this is computer arithmetic, where numbers are finite.
447938fd1498Szrj We can therefore make the transformation of any unbounded range with
448038fd1498Szrj the value Z, Z being greater than any representable number. This permits
448138fd1498Szrj us to treat unbounded ranges as equal. */
448238fd1498Szrj sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
448338fd1498Szrj sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
448438fd1498Szrj switch (code)
448538fd1498Szrj {
448638fd1498Szrj case EQ_EXPR:
448738fd1498Szrj result = sgn0 == sgn1;
448838fd1498Szrj break;
448938fd1498Szrj case NE_EXPR:
449038fd1498Szrj result = sgn0 != sgn1;
449138fd1498Szrj break;
449238fd1498Szrj case LT_EXPR:
449338fd1498Szrj result = sgn0 < sgn1;
449438fd1498Szrj break;
449538fd1498Szrj case LE_EXPR:
449638fd1498Szrj result = sgn0 <= sgn1;
449738fd1498Szrj break;
449838fd1498Szrj case GT_EXPR:
449938fd1498Szrj result = sgn0 > sgn1;
450038fd1498Szrj break;
450138fd1498Szrj case GE_EXPR:
450238fd1498Szrj result = sgn0 >= sgn1;
450338fd1498Szrj break;
450438fd1498Szrj default:
450538fd1498Szrj gcc_unreachable ();
450638fd1498Szrj }
450738fd1498Szrj
450838fd1498Szrj return constant_boolean_node (result, type);
450938fd1498Szrj }
451038fd1498Szrj
451138fd1498Szrj /* Helper routine for make_range. Perform one step for it, return
451238fd1498Szrj new expression if the loop should continue or NULL_TREE if it should
451338fd1498Szrj stop. */
451438fd1498Szrj
451538fd1498Szrj tree
make_range_step(location_t loc,enum tree_code code,tree arg0,tree arg1,tree exp_type,tree * p_low,tree * p_high,int * p_in_p,bool * strict_overflow_p)451638fd1498Szrj make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
451738fd1498Szrj tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
451838fd1498Szrj bool *strict_overflow_p)
451938fd1498Szrj {
452038fd1498Szrj tree arg0_type = TREE_TYPE (arg0);
452138fd1498Szrj tree n_low, n_high, low = *p_low, high = *p_high;
452238fd1498Szrj int in_p = *p_in_p, n_in_p;
452338fd1498Szrj
452438fd1498Szrj switch (code)
452538fd1498Szrj {
452638fd1498Szrj case TRUTH_NOT_EXPR:
452738fd1498Szrj /* We can only do something if the range is testing for zero. */
452838fd1498Szrj if (low == NULL_TREE || high == NULL_TREE
452938fd1498Szrj || ! integer_zerop (low) || ! integer_zerop (high))
453038fd1498Szrj return NULL_TREE;
453138fd1498Szrj *p_in_p = ! in_p;
453238fd1498Szrj return arg0;
453338fd1498Szrj
453438fd1498Szrj case EQ_EXPR: case NE_EXPR:
453538fd1498Szrj case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
453638fd1498Szrj /* We can only do something if the range is testing for zero
453738fd1498Szrj and if the second operand is an integer constant. Note that
453838fd1498Szrj saying something is "in" the range we make is done by
453938fd1498Szrj complementing IN_P since it will set in the initial case of
454038fd1498Szrj being not equal to zero; "out" is leaving it alone. */
454138fd1498Szrj if (low == NULL_TREE || high == NULL_TREE
454238fd1498Szrj || ! integer_zerop (low) || ! integer_zerop (high)
454338fd1498Szrj || TREE_CODE (arg1) != INTEGER_CST)
454438fd1498Szrj return NULL_TREE;
454538fd1498Szrj
454638fd1498Szrj switch (code)
454738fd1498Szrj {
454838fd1498Szrj case NE_EXPR: /* - [c, c] */
454938fd1498Szrj low = high = arg1;
455038fd1498Szrj break;
455138fd1498Szrj case EQ_EXPR: /* + [c, c] */
455238fd1498Szrj in_p = ! in_p, low = high = arg1;
455338fd1498Szrj break;
455438fd1498Szrj case GT_EXPR: /* - [-, c] */
455538fd1498Szrj low = 0, high = arg1;
455638fd1498Szrj break;
455738fd1498Szrj case GE_EXPR: /* + [c, -] */
455838fd1498Szrj in_p = ! in_p, low = arg1, high = 0;
455938fd1498Szrj break;
456038fd1498Szrj case LT_EXPR: /* - [c, -] */
456138fd1498Szrj low = arg1, high = 0;
456238fd1498Szrj break;
456338fd1498Szrj case LE_EXPR: /* + [-, c] */
456438fd1498Szrj in_p = ! in_p, low = 0, high = arg1;
456538fd1498Szrj break;
456638fd1498Szrj default:
456738fd1498Szrj gcc_unreachable ();
456838fd1498Szrj }
456938fd1498Szrj
457038fd1498Szrj /* If this is an unsigned comparison, we also know that EXP is
457138fd1498Szrj greater than or equal to zero. We base the range tests we make
457238fd1498Szrj on that fact, so we record it here so we can parse existing
457338fd1498Szrj range tests. We test arg0_type since often the return type
457438fd1498Szrj of, e.g. EQ_EXPR, is boolean. */
457538fd1498Szrj if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
457638fd1498Szrj {
457738fd1498Szrj if (! merge_ranges (&n_in_p, &n_low, &n_high,
457838fd1498Szrj in_p, low, high, 1,
457938fd1498Szrj build_int_cst (arg0_type, 0),
458038fd1498Szrj NULL_TREE))
458138fd1498Szrj return NULL_TREE;
458238fd1498Szrj
458338fd1498Szrj in_p = n_in_p, low = n_low, high = n_high;
458438fd1498Szrj
458538fd1498Szrj /* If the high bound is missing, but we have a nonzero low
458638fd1498Szrj bound, reverse the range so it goes from zero to the low bound
458738fd1498Szrj minus 1. */
458838fd1498Szrj if (high == 0 && low && ! integer_zerop (low))
458938fd1498Szrj {
459038fd1498Szrj in_p = ! in_p;
459138fd1498Szrj high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
459238fd1498Szrj build_int_cst (TREE_TYPE (low), 1), 0);
459338fd1498Szrj low = build_int_cst (arg0_type, 0);
459438fd1498Szrj }
459538fd1498Szrj }
459638fd1498Szrj
459738fd1498Szrj *p_low = low;
459838fd1498Szrj *p_high = high;
459938fd1498Szrj *p_in_p = in_p;
460038fd1498Szrj return arg0;
460138fd1498Szrj
460238fd1498Szrj case NEGATE_EXPR:
460338fd1498Szrj /* If flag_wrapv and ARG0_TYPE is signed, make sure
460438fd1498Szrj low and high are non-NULL, then normalize will DTRT. */
460538fd1498Szrj if (!TYPE_UNSIGNED (arg0_type)
460638fd1498Szrj && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
460738fd1498Szrj {
460838fd1498Szrj if (low == NULL_TREE)
460938fd1498Szrj low = TYPE_MIN_VALUE (arg0_type);
461038fd1498Szrj if (high == NULL_TREE)
461138fd1498Szrj high = TYPE_MAX_VALUE (arg0_type);
461238fd1498Szrj }
461338fd1498Szrj
461438fd1498Szrj /* (-x) IN [a,b] -> x in [-b, -a] */
461538fd1498Szrj n_low = range_binop (MINUS_EXPR, exp_type,
461638fd1498Szrj build_int_cst (exp_type, 0),
461738fd1498Szrj 0, high, 1);
461838fd1498Szrj n_high = range_binop (MINUS_EXPR, exp_type,
461938fd1498Szrj build_int_cst (exp_type, 0),
462038fd1498Szrj 0, low, 0);
462138fd1498Szrj if (n_high != 0 && TREE_OVERFLOW (n_high))
462238fd1498Szrj return NULL_TREE;
462338fd1498Szrj goto normalize;
462438fd1498Szrj
462538fd1498Szrj case BIT_NOT_EXPR:
462638fd1498Szrj /* ~ X -> -X - 1 */
462738fd1498Szrj return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
462838fd1498Szrj build_int_cst (exp_type, 1));
462938fd1498Szrj
463038fd1498Szrj case PLUS_EXPR:
463138fd1498Szrj case MINUS_EXPR:
463238fd1498Szrj if (TREE_CODE (arg1) != INTEGER_CST)
463338fd1498Szrj return NULL_TREE;
463438fd1498Szrj
463538fd1498Szrj /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
463638fd1498Szrj move a constant to the other side. */
463738fd1498Szrj if (!TYPE_UNSIGNED (arg0_type)
463838fd1498Szrj && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
463938fd1498Szrj return NULL_TREE;
464038fd1498Szrj
464138fd1498Szrj /* If EXP is signed, any overflow in the computation is undefined,
464238fd1498Szrj so we don't worry about it so long as our computations on
464338fd1498Szrj the bounds don't overflow. For unsigned, overflow is defined
464438fd1498Szrj and this is exactly the right thing. */
464538fd1498Szrj n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
464638fd1498Szrj arg0_type, low, 0, arg1, 0);
464738fd1498Szrj n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
464838fd1498Szrj arg0_type, high, 1, arg1, 0);
464938fd1498Szrj if ((n_low != 0 && TREE_OVERFLOW (n_low))
465038fd1498Szrj || (n_high != 0 && TREE_OVERFLOW (n_high)))
465138fd1498Szrj return NULL_TREE;
465238fd1498Szrj
465338fd1498Szrj if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
465438fd1498Szrj *strict_overflow_p = true;
465538fd1498Szrj
465638fd1498Szrj normalize:
465738fd1498Szrj /* Check for an unsigned range which has wrapped around the maximum
465838fd1498Szrj value thus making n_high < n_low, and normalize it. */
465938fd1498Szrj if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
466038fd1498Szrj {
466138fd1498Szrj low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
466238fd1498Szrj build_int_cst (TREE_TYPE (n_high), 1), 0);
466338fd1498Szrj high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
466438fd1498Szrj build_int_cst (TREE_TYPE (n_low), 1), 0);
466538fd1498Szrj
466638fd1498Szrj /* If the range is of the form +/- [ x+1, x ], we won't
466738fd1498Szrj be able to normalize it. But then, it represents the
466838fd1498Szrj whole range or the empty set, so make it
466938fd1498Szrj +/- [ -, - ]. */
467038fd1498Szrj if (tree_int_cst_equal (n_low, low)
467138fd1498Szrj && tree_int_cst_equal (n_high, high))
467238fd1498Szrj low = high = 0;
467338fd1498Szrj else
467438fd1498Szrj in_p = ! in_p;
467538fd1498Szrj }
467638fd1498Szrj else
467738fd1498Szrj low = n_low, high = n_high;
467838fd1498Szrj
467938fd1498Szrj *p_low = low;
468038fd1498Szrj *p_high = high;
468138fd1498Szrj *p_in_p = in_p;
468238fd1498Szrj return arg0;
468338fd1498Szrj
468438fd1498Szrj CASE_CONVERT:
468538fd1498Szrj case NON_LVALUE_EXPR:
468638fd1498Szrj if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
468738fd1498Szrj return NULL_TREE;
468838fd1498Szrj
468938fd1498Szrj if (! INTEGRAL_TYPE_P (arg0_type)
469038fd1498Szrj || (low != 0 && ! int_fits_type_p (low, arg0_type))
469138fd1498Szrj || (high != 0 && ! int_fits_type_p (high, arg0_type)))
469238fd1498Szrj return NULL_TREE;
469338fd1498Szrj
469438fd1498Szrj n_low = low, n_high = high;
469538fd1498Szrj
469638fd1498Szrj if (n_low != 0)
469738fd1498Szrj n_low = fold_convert_loc (loc, arg0_type, n_low);
469838fd1498Szrj
469938fd1498Szrj if (n_high != 0)
470038fd1498Szrj n_high = fold_convert_loc (loc, arg0_type, n_high);
470138fd1498Szrj
470238fd1498Szrj /* If we're converting arg0 from an unsigned type, to exp,
470338fd1498Szrj a signed type, we will be doing the comparison as unsigned.
470438fd1498Szrj The tests above have already verified that LOW and HIGH
470538fd1498Szrj are both positive.
470638fd1498Szrj
470738fd1498Szrj So we have to ensure that we will handle large unsigned
470838fd1498Szrj values the same way that the current signed bounds treat
470938fd1498Szrj negative values. */
471038fd1498Szrj
471138fd1498Szrj if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
471238fd1498Szrj {
471338fd1498Szrj tree high_positive;
471438fd1498Szrj tree equiv_type;
471538fd1498Szrj /* For fixed-point modes, we need to pass the saturating flag
471638fd1498Szrj as the 2nd parameter. */
471738fd1498Szrj if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
471838fd1498Szrj equiv_type
471938fd1498Szrj = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
472038fd1498Szrj TYPE_SATURATING (arg0_type));
472138fd1498Szrj else
472238fd1498Szrj equiv_type
472338fd1498Szrj = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
472438fd1498Szrj
472538fd1498Szrj /* A range without an upper bound is, naturally, unbounded.
472638fd1498Szrj Since convert would have cropped a very large value, use
472738fd1498Szrj the max value for the destination type. */
472838fd1498Szrj high_positive
472938fd1498Szrj = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
473038fd1498Szrj : TYPE_MAX_VALUE (arg0_type);
473138fd1498Szrj
473238fd1498Szrj if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
473338fd1498Szrj high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
473438fd1498Szrj fold_convert_loc (loc, arg0_type,
473538fd1498Szrj high_positive),
473638fd1498Szrj build_int_cst (arg0_type, 1));
473738fd1498Szrj
473838fd1498Szrj /* If the low bound is specified, "and" the range with the
473938fd1498Szrj range for which the original unsigned value will be
474038fd1498Szrj positive. */
474138fd1498Szrj if (low != 0)
474238fd1498Szrj {
474338fd1498Szrj if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
474438fd1498Szrj 1, fold_convert_loc (loc, arg0_type,
474538fd1498Szrj integer_zero_node),
474638fd1498Szrj high_positive))
474738fd1498Szrj return NULL_TREE;
474838fd1498Szrj
474938fd1498Szrj in_p = (n_in_p == in_p);
475038fd1498Szrj }
475138fd1498Szrj else
475238fd1498Szrj {
475338fd1498Szrj /* Otherwise, "or" the range with the range of the input
475438fd1498Szrj that will be interpreted as negative. */
475538fd1498Szrj if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
475638fd1498Szrj 1, fold_convert_loc (loc, arg0_type,
475738fd1498Szrj integer_zero_node),
475838fd1498Szrj high_positive))
475938fd1498Szrj return NULL_TREE;
476038fd1498Szrj
476138fd1498Szrj in_p = (in_p != n_in_p);
476238fd1498Szrj }
476338fd1498Szrj }
476438fd1498Szrj
476538fd1498Szrj *p_low = n_low;
476638fd1498Szrj *p_high = n_high;
476738fd1498Szrj *p_in_p = in_p;
476838fd1498Szrj return arg0;
476938fd1498Szrj
477038fd1498Szrj default:
477138fd1498Szrj return NULL_TREE;
477238fd1498Szrj }
477338fd1498Szrj }
477438fd1498Szrj
477538fd1498Szrj /* Given EXP, a logical expression, set the range it is testing into
477638fd1498Szrj variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
477738fd1498Szrj actually being tested. *PLOW and *PHIGH will be made of the same
477838fd1498Szrj type as the returned expression. If EXP is not a comparison, we
477938fd1498Szrj will most likely not be returning a useful value and range. Set
478038fd1498Szrj *STRICT_OVERFLOW_P to true if the return value is only valid
478138fd1498Szrj because signed overflow is undefined; otherwise, do not change
478238fd1498Szrj *STRICT_OVERFLOW_P. */
478338fd1498Szrj
478438fd1498Szrj tree
make_range(tree exp,int * pin_p,tree * plow,tree * phigh,bool * strict_overflow_p)478538fd1498Szrj make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
478638fd1498Szrj bool *strict_overflow_p)
478738fd1498Szrj {
478838fd1498Szrj enum tree_code code;
478938fd1498Szrj tree arg0, arg1 = NULL_TREE;
479038fd1498Szrj tree exp_type, nexp;
479138fd1498Szrj int in_p;
479238fd1498Szrj tree low, high;
479338fd1498Szrj location_t loc = EXPR_LOCATION (exp);
479438fd1498Szrj
479538fd1498Szrj /* Start with simply saying "EXP != 0" and then look at the code of EXP
479638fd1498Szrj and see if we can refine the range. Some of the cases below may not
479738fd1498Szrj happen, but it doesn't seem worth worrying about this. We "continue"
479838fd1498Szrj the outer loop when we've changed something; otherwise we "break"
479938fd1498Szrj the switch, which will "break" the while. */
480038fd1498Szrj
480138fd1498Szrj in_p = 0;
480238fd1498Szrj low = high = build_int_cst (TREE_TYPE (exp), 0);
480338fd1498Szrj
480438fd1498Szrj while (1)
480538fd1498Szrj {
480638fd1498Szrj code = TREE_CODE (exp);
480738fd1498Szrj exp_type = TREE_TYPE (exp);
480838fd1498Szrj arg0 = NULL_TREE;
480938fd1498Szrj
481038fd1498Szrj if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
481138fd1498Szrj {
481238fd1498Szrj if (TREE_OPERAND_LENGTH (exp) > 0)
481338fd1498Szrj arg0 = TREE_OPERAND (exp, 0);
481438fd1498Szrj if (TREE_CODE_CLASS (code) == tcc_binary
481538fd1498Szrj || TREE_CODE_CLASS (code) == tcc_comparison
481638fd1498Szrj || (TREE_CODE_CLASS (code) == tcc_expression
481738fd1498Szrj && TREE_OPERAND_LENGTH (exp) > 1))
481838fd1498Szrj arg1 = TREE_OPERAND (exp, 1);
481938fd1498Szrj }
482038fd1498Szrj if (arg0 == NULL_TREE)
482138fd1498Szrj break;
482238fd1498Szrj
482338fd1498Szrj nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
482438fd1498Szrj &high, &in_p, strict_overflow_p);
482538fd1498Szrj if (nexp == NULL_TREE)
482638fd1498Szrj break;
482738fd1498Szrj exp = nexp;
482838fd1498Szrj }
482938fd1498Szrj
483038fd1498Szrj /* If EXP is a constant, we can evaluate whether this is true or false. */
483138fd1498Szrj if (TREE_CODE (exp) == INTEGER_CST)
483238fd1498Szrj {
483338fd1498Szrj in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
483438fd1498Szrj exp, 0, low, 0))
483538fd1498Szrj && integer_onep (range_binop (LE_EXPR, integer_type_node,
483638fd1498Szrj exp, 1, high, 1)));
483738fd1498Szrj low = high = 0;
483838fd1498Szrj exp = 0;
483938fd1498Szrj }
484038fd1498Szrj
484138fd1498Szrj *pin_p = in_p, *plow = low, *phigh = high;
484238fd1498Szrj return exp;
484338fd1498Szrj }
484438fd1498Szrj
484538fd1498Szrj /* Returns TRUE if [LOW, HIGH] range check can be optimized to
484638fd1498Szrj a bitwise check i.e. when
484738fd1498Szrj LOW == 0xXX...X00...0
484838fd1498Szrj HIGH == 0xXX...X11...1
484938fd1498Szrj Return corresponding mask in MASK and stem in VALUE. */
485038fd1498Szrj
485138fd1498Szrj static bool
maskable_range_p(const_tree low,const_tree high,tree type,tree * mask,tree * value)485238fd1498Szrj maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
485338fd1498Szrj tree *value)
485438fd1498Szrj {
485538fd1498Szrj if (TREE_CODE (low) != INTEGER_CST
485638fd1498Szrj || TREE_CODE (high) != INTEGER_CST)
485738fd1498Szrj return false;
485838fd1498Szrj
485938fd1498Szrj unsigned prec = TYPE_PRECISION (type);
486038fd1498Szrj wide_int lo = wi::to_wide (low, prec);
486138fd1498Szrj wide_int hi = wi::to_wide (high, prec);
486238fd1498Szrj
486338fd1498Szrj wide_int end_mask = lo ^ hi;
486438fd1498Szrj if ((end_mask & (end_mask + 1)) != 0
486538fd1498Szrj || (lo & end_mask) != 0)
486638fd1498Szrj return false;
486738fd1498Szrj
486838fd1498Szrj wide_int stem_mask = ~end_mask;
486938fd1498Szrj wide_int stem = lo & stem_mask;
487038fd1498Szrj if (stem != (hi & stem_mask))
487138fd1498Szrj return false;
487238fd1498Szrj
487338fd1498Szrj *mask = wide_int_to_tree (type, stem_mask);
487438fd1498Szrj *value = wide_int_to_tree (type, stem);
487538fd1498Szrj
487638fd1498Szrj return true;
487738fd1498Szrj }
487838fd1498Szrj
487938fd1498Szrj /* Helper routine for build_range_check and match.pd. Return the type to
488038fd1498Szrj perform the check or NULL if it shouldn't be optimized. */
488138fd1498Szrj
488238fd1498Szrj tree
range_check_type(tree etype)488338fd1498Szrj range_check_type (tree etype)
488438fd1498Szrj {
488538fd1498Szrj /* First make sure that arithmetics in this type is valid, then make sure
488638fd1498Szrj that it wraps around. */
488738fd1498Szrj if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
488838fd1498Szrj etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
488938fd1498Szrj TYPE_UNSIGNED (etype));
489038fd1498Szrj
489138fd1498Szrj if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
489238fd1498Szrj {
489338fd1498Szrj tree utype, minv, maxv;
489438fd1498Szrj
489538fd1498Szrj /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
489638fd1498Szrj for the type in question, as we rely on this here. */
489738fd1498Szrj utype = unsigned_type_for (etype);
489838fd1498Szrj maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
489938fd1498Szrj maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
490038fd1498Szrj build_int_cst (TREE_TYPE (maxv), 1), 1);
490138fd1498Szrj minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
490238fd1498Szrj
490338fd1498Szrj if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
490438fd1498Szrj minv, 1, maxv, 1)))
490538fd1498Szrj etype = utype;
490638fd1498Szrj else
490738fd1498Szrj return NULL_TREE;
490838fd1498Szrj }
490938fd1498Szrj return etype;
491038fd1498Szrj }
491138fd1498Szrj
491238fd1498Szrj /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
491338fd1498Szrj type, TYPE, return an expression to test if EXP is in (or out of, depending
491438fd1498Szrj on IN_P) the range. Return 0 if the test couldn't be created. */
491538fd1498Szrj
491638fd1498Szrj tree
build_range_check(location_t loc,tree type,tree exp,int in_p,tree low,tree high)491738fd1498Szrj build_range_check (location_t loc, tree type, tree exp, int in_p,
491838fd1498Szrj tree low, tree high)
491938fd1498Szrj {
492038fd1498Szrj tree etype = TREE_TYPE (exp), mask, value;
492138fd1498Szrj
492238fd1498Szrj /* Disable this optimization for function pointer expressions
492338fd1498Szrj on targets that require function pointer canonicalization. */
492438fd1498Szrj if (targetm.have_canonicalize_funcptr_for_compare ()
492558e805e6Szrj && POINTER_TYPE_P (etype)
492658e805e6Szrj && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
492738fd1498Szrj return NULL_TREE;
492838fd1498Szrj
492938fd1498Szrj if (! in_p)
493038fd1498Szrj {
493138fd1498Szrj value = build_range_check (loc, type, exp, 1, low, high);
493238fd1498Szrj if (value != 0)
493338fd1498Szrj return invert_truthvalue_loc (loc, value);
493438fd1498Szrj
493538fd1498Szrj return 0;
493638fd1498Szrj }
493738fd1498Szrj
493838fd1498Szrj if (low == 0 && high == 0)
493938fd1498Szrj return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
494038fd1498Szrj
494138fd1498Szrj if (low == 0)
494238fd1498Szrj return fold_build2_loc (loc, LE_EXPR, type, exp,
494338fd1498Szrj fold_convert_loc (loc, etype, high));
494438fd1498Szrj
494538fd1498Szrj if (high == 0)
494638fd1498Szrj return fold_build2_loc (loc, GE_EXPR, type, exp,
494738fd1498Szrj fold_convert_loc (loc, etype, low));
494838fd1498Szrj
494938fd1498Szrj if (operand_equal_p (low, high, 0))
495038fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, exp,
495138fd1498Szrj fold_convert_loc (loc, etype, low));
495238fd1498Szrj
495338fd1498Szrj if (TREE_CODE (exp) == BIT_AND_EXPR
495438fd1498Szrj && maskable_range_p (low, high, etype, &mask, &value))
495538fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type,
495638fd1498Szrj fold_build2_loc (loc, BIT_AND_EXPR, etype,
495738fd1498Szrj exp, mask),
495838fd1498Szrj value);
495938fd1498Szrj
496038fd1498Szrj if (integer_zerop (low))
496138fd1498Szrj {
496238fd1498Szrj if (! TYPE_UNSIGNED (etype))
496338fd1498Szrj {
496438fd1498Szrj etype = unsigned_type_for (etype);
496538fd1498Szrj high = fold_convert_loc (loc, etype, high);
496638fd1498Szrj exp = fold_convert_loc (loc, etype, exp);
496738fd1498Szrj }
496838fd1498Szrj return build_range_check (loc, type, exp, 1, 0, high);
496938fd1498Szrj }
497038fd1498Szrj
497138fd1498Szrj /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
497238fd1498Szrj if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
497338fd1498Szrj {
497438fd1498Szrj int prec = TYPE_PRECISION (etype);
497538fd1498Szrj
497638fd1498Szrj if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
497738fd1498Szrj {
497838fd1498Szrj if (TYPE_UNSIGNED (etype))
497938fd1498Szrj {
498038fd1498Szrj tree signed_etype = signed_type_for (etype);
498138fd1498Szrj if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
498238fd1498Szrj etype
498338fd1498Szrj = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
498438fd1498Szrj else
498538fd1498Szrj etype = signed_etype;
498638fd1498Szrj exp = fold_convert_loc (loc, etype, exp);
498738fd1498Szrj }
498838fd1498Szrj return fold_build2_loc (loc, GT_EXPR, type, exp,
498938fd1498Szrj build_int_cst (etype, 0));
499038fd1498Szrj }
499138fd1498Szrj }
499238fd1498Szrj
499338fd1498Szrj /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
499438fd1498Szrj This requires wrap-around arithmetics for the type of the expression. */
499538fd1498Szrj etype = range_check_type (etype);
499638fd1498Szrj if (etype == NULL_TREE)
499738fd1498Szrj return NULL_TREE;
499838fd1498Szrj
499938fd1498Szrj if (POINTER_TYPE_P (etype))
500038fd1498Szrj etype = unsigned_type_for (etype);
500138fd1498Szrj
500238fd1498Szrj high = fold_convert_loc (loc, etype, high);
500338fd1498Szrj low = fold_convert_loc (loc, etype, low);
500438fd1498Szrj exp = fold_convert_loc (loc, etype, exp);
500538fd1498Szrj
500638fd1498Szrj value = const_binop (MINUS_EXPR, high, low);
500738fd1498Szrj
500838fd1498Szrj if (value != 0 && !TREE_OVERFLOW (value))
500938fd1498Szrj return build_range_check (loc, type,
501038fd1498Szrj fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
501138fd1498Szrj 1, build_int_cst (etype, 0), value);
501238fd1498Szrj
501338fd1498Szrj return 0;
501438fd1498Szrj }
501538fd1498Szrj
501638fd1498Szrj /* Return the predecessor of VAL in its type, handling the infinite case. */
501738fd1498Szrj
501838fd1498Szrj static tree
range_predecessor(tree val)501938fd1498Szrj range_predecessor (tree val)
502038fd1498Szrj {
502138fd1498Szrj tree type = TREE_TYPE (val);
502238fd1498Szrj
502338fd1498Szrj if (INTEGRAL_TYPE_P (type)
502438fd1498Szrj && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
502538fd1498Szrj return 0;
502638fd1498Szrj else
502738fd1498Szrj return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
502838fd1498Szrj build_int_cst (TREE_TYPE (val), 1), 0);
502938fd1498Szrj }
503038fd1498Szrj
503138fd1498Szrj /* Return the successor of VAL in its type, handling the infinite case. */
503238fd1498Szrj
503338fd1498Szrj static tree
range_successor(tree val)503438fd1498Szrj range_successor (tree val)
503538fd1498Szrj {
503638fd1498Szrj tree type = TREE_TYPE (val);
503738fd1498Szrj
503838fd1498Szrj if (INTEGRAL_TYPE_P (type)
503938fd1498Szrj && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
504038fd1498Szrj return 0;
504138fd1498Szrj else
504238fd1498Szrj return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
504338fd1498Szrj build_int_cst (TREE_TYPE (val), 1), 0);
504438fd1498Szrj }
504538fd1498Szrj
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.
   A null LOW0/LOW1 bound represents -infinity and a null HIGH0/HIGH1 bound
   +infinity; IN0_P/IN1_P say whether each range is included (+[lo,hi]) or
   excluded (-[lo,hi]).  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two lower (resp. upper) bounds are equal; two null
     (infinite) bounds only equal each other.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    /* Only treat an enum bound as MIN when the enum's
		       precision fills its mode, so min really is the
		       smallest representable value.  */
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (low0)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (high1)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    /* An unsigned pointer is at max if adding 1 wraps
		       around to zero.  */
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
525938fd1498Szrj
526038fd1498Szrj
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  /* Strip conversions that don't change the value so the structural
     comparisons below see through casts.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return fold_convert_loc (loc, type, negate_expr (tem));
      case NE_EXPR:
      case LTGT_EXPR:
	return fold_convert_loc (loc, type, arg1);
      case UNGE_EXPR:
      case UNGT_EXPR:
	/* The unordered forms may hide a comparison that traps on
	   NaN; don't rewrite them when trapping math matters.  */
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	/* ABS_EXPR is meaningless on unsigned operands.  */
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return fold_convert_loc (loc, type, tem);
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* FALLTHRU */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return fold_convert_loc (loc, type, arg1);
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      switch (comp_code)
	{
	case EQ_EXPR:
	  return fold_convert_loc (loc, type, arg2);
	case NE_EXPR:
	  return fold_convert_loc (loc, type, arg1);
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg2);
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg1);
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  return NULL_TREE;
}
546238fd1498Szrj
546338fd1498Szrj
546438fd1498Szrj
/* Default policy for turning short-circuit TRUTH_ANDIF/ORIF into
   non-branching TRUTH_AND/OR: do so when the target's branch cost is
   at least 2.  Targets may pre-define this macro to override.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
547038fd1498Szrj
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Decompose each operand into a (possibly inverted) range test
     [lowN, highN] on some common expression.  */
  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && (tem = (build_range_check (loc, type,
				    lhs != 0 ? lhs
				    : rhs != 0 ? rhs : integer_zero_node,
				    in_p, low, high))) != 0)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  /* An explicit --param value overrides the target's default.  */
  if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
    logical_op_non_short_circuit
      = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
  if (logical_op_non_short_circuit
      && !flag_sanitize_coverage
      && lhs != 0 && rhs != 0
      && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
      && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if ((lhs = build_range_check (loc, type, common,
					or_op ? ! in0_p : in0_p,
					low0, high0)) != 0
	      && (rhs = build_range_check (loc, type, common,
					   or_op ? ! in1_p : in1_p,
					   low1, high1)) != 0)
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
556038fd1498Szrj
556138fd1498Szrj /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
556238fd1498Szrj bit value. Arrange things so the extra bits will be set to zero if and
556338fd1498Szrj only if C is signed-extended to its full width. If MASK is nonzero,
556438fd1498Szrj it is an INTEGER_CST that should be AND'ed with the extra bits. */
556538fd1498Szrj
556638fd1498Szrj static tree
unextend(tree c,int p,int unsignedp,tree mask)556738fd1498Szrj unextend (tree c, int p, int unsignedp, tree mask)
556838fd1498Szrj {
556938fd1498Szrj tree type = TREE_TYPE (c);
557038fd1498Szrj int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
557138fd1498Szrj tree temp;
557238fd1498Szrj
557338fd1498Szrj if (p == modesize || unsignedp)
557438fd1498Szrj return c;
557538fd1498Szrj
557638fd1498Szrj /* We work by getting just the sign bit into the low-order bit, then
557738fd1498Szrj into the high-order bit, then sign-extend. We then XOR that value
557838fd1498Szrj with C. */
557938fd1498Szrj temp = build_int_cst (TREE_TYPE (c),
558038fd1498Szrj wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
558138fd1498Szrj
558238fd1498Szrj /* We must use a signed type in order to get an arithmetic right shift.
558338fd1498Szrj However, we must also avoid introducing accidental overflows, so that
558438fd1498Szrj a subsequent call to integer_zerop will work. Hence we must
558538fd1498Szrj do the type conversion here. At this point, the constant is either
558638fd1498Szrj zero or one, and the conversion to a signed type can never overflow.
558738fd1498Szrj We could get an overflow if this conversion is done anywhere else. */
558838fd1498Szrj if (TYPE_UNSIGNED (type))
558938fd1498Szrj temp = fold_convert (signed_type_for (type), temp);
559038fd1498Szrj
559138fd1498Szrj temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
559238fd1498Szrj temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
559338fd1498Szrj if (mask != 0)
559438fd1498Szrj temp = const_binop (BIT_AND_EXPR, temp,
559538fd1498Szrj fold_convert (TREE_TYPE (c), mask));
559638fd1498Szrj /* If necessary, convert the type back to match the type of C. */
559738fd1498Szrj if (TYPE_UNSIGNED (type))
559838fd1498Szrj temp = fold_convert (type, temp);
559938fd1498Szrj
560038fd1498Szrj return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
560138fd1498Szrj }
560238fd1498Szrj
560338fd1498Szrj /* For an expression that has the form
560438fd1498Szrj (A && B) || ~B
560538fd1498Szrj or
560638fd1498Szrj (A || B) && ~B,
560738fd1498Szrj we can drop one of the inner expressions and simplify to
560838fd1498Szrj A || ~B
560938fd1498Szrj or
561038fd1498Szrj A && ~B
561138fd1498Szrj LOC is the location of the resulting expression. OP is the inner
561238fd1498Szrj logical operation; the left-hand side in the examples above, while CMPOP
561338fd1498Szrj is the right-hand side. RHS_ONLY is used to prevent us from accidentally
561438fd1498Szrj removing a condition that guards another, as in
561538fd1498Szrj (A != NULL && A->...) || A == NULL
561638fd1498Szrj which we must not transform. If RHS_ONLY is true, only eliminate the
561738fd1498Szrj right-most operand of the inner logical operation. */
561838fd1498Szrj
561938fd1498Szrj static tree
merge_truthop_with_opposite_arm(location_t loc,tree op,tree cmpop,bool rhs_only)562038fd1498Szrj merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
562138fd1498Szrj bool rhs_only)
562238fd1498Szrj {
562338fd1498Szrj tree type = TREE_TYPE (cmpop);
562438fd1498Szrj enum tree_code code = TREE_CODE (cmpop);
562538fd1498Szrj enum tree_code truthop_code = TREE_CODE (op);
562638fd1498Szrj tree lhs = TREE_OPERAND (op, 0);
562738fd1498Szrj tree rhs = TREE_OPERAND (op, 1);
562838fd1498Szrj tree orig_lhs = lhs, orig_rhs = rhs;
562938fd1498Szrj enum tree_code rhs_code = TREE_CODE (rhs);
563038fd1498Szrj enum tree_code lhs_code = TREE_CODE (lhs);
563138fd1498Szrj enum tree_code inv_code;
563238fd1498Szrj
563338fd1498Szrj if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
563438fd1498Szrj return NULL_TREE;
563538fd1498Szrj
563638fd1498Szrj if (TREE_CODE_CLASS (code) != tcc_comparison)
563738fd1498Szrj return NULL_TREE;
563838fd1498Szrj
563938fd1498Szrj if (rhs_code == truthop_code)
564038fd1498Szrj {
564138fd1498Szrj tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
564238fd1498Szrj if (newrhs != NULL_TREE)
564338fd1498Szrj {
564438fd1498Szrj rhs = newrhs;
564538fd1498Szrj rhs_code = TREE_CODE (rhs);
564638fd1498Szrj }
564738fd1498Szrj }
564838fd1498Szrj if (lhs_code == truthop_code && !rhs_only)
564938fd1498Szrj {
565038fd1498Szrj tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
565138fd1498Szrj if (newlhs != NULL_TREE)
565238fd1498Szrj {
565338fd1498Szrj lhs = newlhs;
565438fd1498Szrj lhs_code = TREE_CODE (lhs);
565538fd1498Szrj }
565638fd1498Szrj }
565738fd1498Szrj
565838fd1498Szrj inv_code = invert_tree_comparison (code, HONOR_NANS (type));
565938fd1498Szrj if (inv_code == rhs_code
566038fd1498Szrj && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
566138fd1498Szrj && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
566238fd1498Szrj return lhs;
566338fd1498Szrj if (!rhs_only && inv_code == lhs_code
566438fd1498Szrj && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
566538fd1498Szrj && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
566638fd1498Szrj return rhs;
566738fd1498Szrj if (rhs != orig_rhs || lhs != orig_lhs)
566838fd1498Szrj return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
566938fd1498Szrj lhs, rhs);
567038fd1498Szrj return NULL_TREE;
567138fd1498Szrj }
567238fd1498Szrj
567338fd1498Szrj /* Find ways of folding logical expressions of LHS and RHS:
567438fd1498Szrj Try to merge two comparisons to the same innermost item.
567538fd1498Szrj Look for range tests like "ch >= '0' && ch <= '9'".
567638fd1498Szrj Look for combinations of simple terms on machines with expensive branches
567738fd1498Szrj and evaluate the RHS unconditionally.
567838fd1498Szrj
567938fd1498Szrj For example, if we have p->a == 2 && p->b == 4 and we can make an
568038fd1498Szrj object large enough to span both A and B, we can do this with a comparison
568138fd1498Szrj against the object ANDed with the a mask.
568238fd1498Szrj
568338fd1498Szrj If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
568438fd1498Szrj operations to do this with one comparison.
568538fd1498Szrj
568638fd1498Szrj We check for both normal comparisons and the BIT_AND_EXPRs made this by
568738fd1498Szrj function and the one above.
568838fd1498Szrj
568938fd1498Szrj CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
569038fd1498Szrj TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
569138fd1498Szrj
569238fd1498Szrj TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
569338fd1498Szrj two operands.
569438fd1498Szrj
569538fd1498Szrj We return the simplified tree or 0 if no optimization is possible. */
569638fd1498Szrj
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  /* Naming scheme for the many locals below: the first letter pair
     identifies the operand — ll = left comparison's left operand,
     lr = left comparison's right operand, rl/rr likewise for the right
     comparison.  The x*_bitpos variables are bit positions re-expressed
     relative to the combined field chosen later.  */
  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  scalar_int_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      /* Canonicalize (x & 1) to (x & 1) != 0 so the rest of the
	 function only has to deal with comparisons.  */
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  /* Both comparisons have the same operands in the same order:
	     combine their comparison codes directly.  */
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  /* Same operands but swapped in the second comparison; swap
	     its code so the two can still be combined.  */
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  /* From here on only the AND/OR distinction matters, not whether the
     operator was short-circuiting.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  /* Decompose each of the four comparison operands into an inner
     reference plus bit size/position, mode, signedness, storage-order
     reversal flag and masks.  A null result means the operand is not a
     (masked) bit-field reference.  VOLATILEP accumulates across all
     four calls.  */
  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &ll_reversep, &volatilep,
				     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &lr_reversep, &volatilep,
				     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &rl_reversep, &volatilep,
				     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &rr_reversep, &volatilep,
				     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
	   || lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
		      volatilep, &lnmode))
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Align the combined field's position down to a multiple of its
     size (lnbitsize is a power of two, being a mode size).  */
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  /* Re-express bit positions from the most-significant end when the
     effective storage order is big-endian.  */
  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  /* Shift the field masks into their positions within the wider field.  */
  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      /* If the constant has bits outside the field's mask the
	 comparison can never vary; warn and fold to a constant.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size, signedness or storage order
     mismatch occurs between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  || ll_reversep != lr_reversep
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
			  volatilep, &rnmode))
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize
	  && xll_bitpos == xlr_bitpos
	  && lnbitpos >= 0
	  && rnbitpos >= 0)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
				    lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp, ll_reversep);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
				    rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp, lr_reversep);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if (((ll_bitsize + ll_bitpos == rl_bitpos
	    && lr_bitsize + lr_bitpos == rr_bitpos)
	   || (ll_bitpos == rl_bitpos + rl_bitsize
	       && lr_bitpos == rr_bitpos + rr_bitsize))
	  && ll_bitpos >= 0
	  && rl_bitpos >= 0
	  && lr_bitpos >= 0
	  && rr_bitpos >= 0)
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos),
				    ll_unsignedp, ll_reversep);
	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos),
				    lr_unsignedp, lr_reversep);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  if (lnbitpos < 0)
    return 0;

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
			       lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
612338fd1498Szrj
612438fd1498Szrj /* T is an integer expression that is being multiplied, divided, or taken a
612538fd1498Szrj modulus (CODE says which and what kind of divide or modulus) by a
612638fd1498Szrj constant C. See if we can eliminate that operation by folding it with
612738fd1498Szrj other operations already in T. WIDE_TYPE, if non-null, is a type that
612838fd1498Szrj should be used for the computation if wider than our type.
612938fd1498Szrj
613038fd1498Szrj For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
613138fd1498Szrj (X * 2) + (Y * 4). We must, however, be assured that either the original
613238fd1498Szrj expression would not overflow or that overflow is undefined for the type
613338fd1498Szrj in the language in question.
613438fd1498Szrj
613538fd1498Szrj If we return a non-null expression, it is an equivalent form of the
613638fd1498Szrj original computation, but need not be in the original type.
613738fd1498Szrj
613838fd1498Szrj    We set *STRICT_OVERFLOW_P to true if the return value depends on
613938fd1498Szrj signed overflow being undefined. Otherwise we do not change
614038fd1498Szrj *STRICT_OVERFLOW_P. */
614138fd1498Szrj
614238fd1498Szrj static tree
extract_muldiv(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)614338fd1498Szrj extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
614438fd1498Szrj bool *strict_overflow_p)
614538fd1498Szrj {
614638fd1498Szrj /* To avoid exponential search depth, refuse to allow recursion past
614738fd1498Szrj three levels. Beyond that (1) it's highly unlikely that we'll find
614838fd1498Szrj something interesting and (2) we've probably processed it before
614938fd1498Szrj when we built the inner expression. */
615038fd1498Szrj
615138fd1498Szrj static int depth;
615238fd1498Szrj tree ret;
615338fd1498Szrj
615438fd1498Szrj if (depth > 3)
615538fd1498Szrj return NULL;
615638fd1498Szrj
615738fd1498Szrj depth++;
615838fd1498Szrj ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
615938fd1498Szrj depth--;
616038fd1498Szrj
616138fd1498Szrj return ret;
616238fd1498Szrj }
616338fd1498Szrj
616438fd1498Szrj static tree
extract_muldiv_1(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)616538fd1498Szrj extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
616638fd1498Szrj bool *strict_overflow_p)
616738fd1498Szrj {
616838fd1498Szrj tree type = TREE_TYPE (t);
616938fd1498Szrj enum tree_code tcode = TREE_CODE (t);
617038fd1498Szrj tree ctype = (wide_type != 0
617138fd1498Szrj && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
617238fd1498Szrj > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
617338fd1498Szrj ? wide_type : type);
617438fd1498Szrj tree t1, t2;
617538fd1498Szrj int same_p = tcode == code;
617638fd1498Szrj tree op0 = NULL_TREE, op1 = NULL_TREE;
617738fd1498Szrj bool sub_strict_overflow_p;
617838fd1498Szrj
617938fd1498Szrj /* Don't deal with constants of zero here; they confuse the code below. */
618038fd1498Szrj if (integer_zerop (c))
618138fd1498Szrj return NULL_TREE;
618238fd1498Szrj
618338fd1498Szrj if (TREE_CODE_CLASS (tcode) == tcc_unary)
618438fd1498Szrj op0 = TREE_OPERAND (t, 0);
618538fd1498Szrj
618638fd1498Szrj if (TREE_CODE_CLASS (tcode) == tcc_binary)
618738fd1498Szrj op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
618838fd1498Szrj
618938fd1498Szrj /* Note that we need not handle conditional operations here since fold
619038fd1498Szrj already handles those cases. So just do arithmetic here. */
619138fd1498Szrj switch (tcode)
619238fd1498Szrj {
619338fd1498Szrj case INTEGER_CST:
619438fd1498Szrj /* For a constant, we can always simplify if we are a multiply
619538fd1498Szrj or (for divide and modulus) if it is a multiple of our constant. */
619638fd1498Szrj if (code == MULT_EXPR
619738fd1498Szrj || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
619838fd1498Szrj TYPE_SIGN (type)))
619938fd1498Szrj {
620038fd1498Szrj tree tem = const_binop (code, fold_convert (ctype, t),
620138fd1498Szrj fold_convert (ctype, c));
620238fd1498Szrj /* If the multiplication overflowed, we lost information on it.
620338fd1498Szrj See PR68142 and PR69845. */
620438fd1498Szrj if (TREE_OVERFLOW (tem))
620538fd1498Szrj return NULL_TREE;
620638fd1498Szrj return tem;
620738fd1498Szrj }
620838fd1498Szrj break;
620938fd1498Szrj
621038fd1498Szrj CASE_CONVERT: case NON_LVALUE_EXPR:
621138fd1498Szrj /* If op0 is an expression ... */
621238fd1498Szrj if ((COMPARISON_CLASS_P (op0)
621338fd1498Szrj || UNARY_CLASS_P (op0)
621438fd1498Szrj || BINARY_CLASS_P (op0)
621538fd1498Szrj || VL_EXP_CLASS_P (op0)
621638fd1498Szrj || EXPRESSION_CLASS_P (op0))
621738fd1498Szrj /* ... and has wrapping overflow, and its type is smaller
621838fd1498Szrj than ctype, then we cannot pass through as widening. */
621938fd1498Szrj && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
622038fd1498Szrj && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
622138fd1498Szrj && (TYPE_PRECISION (ctype)
622238fd1498Szrj > TYPE_PRECISION (TREE_TYPE (op0))))
622338fd1498Szrj /* ... or this is a truncation (t is narrower than op0),
622438fd1498Szrj then we cannot pass through this narrowing. */
622538fd1498Szrj || (TYPE_PRECISION (type)
622638fd1498Szrj < TYPE_PRECISION (TREE_TYPE (op0)))
622738fd1498Szrj /* ... or signedness changes for division or modulus,
622838fd1498Szrj then we cannot pass through this conversion. */
622938fd1498Szrj || (code != MULT_EXPR
623038fd1498Szrj && (TYPE_UNSIGNED (ctype)
623138fd1498Szrj != TYPE_UNSIGNED (TREE_TYPE (op0))))
623238fd1498Szrj /* ... or has undefined overflow while the converted to
623338fd1498Szrj type has not, we cannot do the operation in the inner type
623438fd1498Szrj as that would introduce undefined overflow. */
623538fd1498Szrj || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
623638fd1498Szrj && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
623738fd1498Szrj && !TYPE_OVERFLOW_UNDEFINED (type))))
623838fd1498Szrj break;
623938fd1498Szrj
624038fd1498Szrj /* Pass the constant down and see if we can make a simplification. If
624138fd1498Szrj we can, replace this expression with the inner simplification for
624238fd1498Szrj possible later conversion to our or some other type. */
624338fd1498Szrj if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
624438fd1498Szrj && TREE_CODE (t2) == INTEGER_CST
624538fd1498Szrj && !TREE_OVERFLOW (t2)
624638fd1498Szrj && (t1 = extract_muldiv (op0, t2, code,
624738fd1498Szrj code == MULT_EXPR ? ctype : NULL_TREE,
624838fd1498Szrj strict_overflow_p)) != 0)
624938fd1498Szrj return t1;
625038fd1498Szrj break;
625138fd1498Szrj
625238fd1498Szrj case ABS_EXPR:
625338fd1498Szrj /* If widening the type changes it from signed to unsigned, then we
625438fd1498Szrj must avoid building ABS_EXPR itself as unsigned. */
625538fd1498Szrj if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
625638fd1498Szrj {
625738fd1498Szrj tree cstype = (*signed_type_for) (ctype);
625838fd1498Szrj if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
625938fd1498Szrj != 0)
626038fd1498Szrj {
626138fd1498Szrj t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
626238fd1498Szrj return fold_convert (ctype, t1);
626338fd1498Szrj }
626438fd1498Szrj break;
626538fd1498Szrj }
626638fd1498Szrj /* If the constant is negative, we cannot simplify this. */
626738fd1498Szrj if (tree_int_cst_sgn (c) == -1)
626838fd1498Szrj break;
626938fd1498Szrj /* FALLTHROUGH */
627038fd1498Szrj case NEGATE_EXPR:
627138fd1498Szrj /* For division and modulus, type can't be unsigned, as e.g.
627238fd1498Szrj (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
627338fd1498Szrj For signed types, even with wrapping overflow, this is fine. */
627438fd1498Szrj if (code != MULT_EXPR && TYPE_UNSIGNED (type))
627538fd1498Szrj break;
627638fd1498Szrj if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
627738fd1498Szrj != 0)
627838fd1498Szrj return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
627938fd1498Szrj break;
628038fd1498Szrj
628138fd1498Szrj case MIN_EXPR: case MAX_EXPR:
628238fd1498Szrj /* If widening the type changes the signedness, then we can't perform
628338fd1498Szrj this optimization as that changes the result. */
628438fd1498Szrj if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
628538fd1498Szrj break;
628638fd1498Szrj
628738fd1498Szrj /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
628838fd1498Szrj sub_strict_overflow_p = false;
628938fd1498Szrj if ((t1 = extract_muldiv (op0, c, code, wide_type,
629038fd1498Szrj &sub_strict_overflow_p)) != 0
629138fd1498Szrj && (t2 = extract_muldiv (op1, c, code, wide_type,
629238fd1498Szrj &sub_strict_overflow_p)) != 0)
629338fd1498Szrj {
629438fd1498Szrj if (tree_int_cst_sgn (c) < 0)
629538fd1498Szrj tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
629638fd1498Szrj if (sub_strict_overflow_p)
629738fd1498Szrj *strict_overflow_p = true;
629838fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
629938fd1498Szrj fold_convert (ctype, t2));
630038fd1498Szrj }
630138fd1498Szrj break;
630238fd1498Szrj
630338fd1498Szrj case LSHIFT_EXPR: case RSHIFT_EXPR:
630438fd1498Szrj /* If the second operand is constant, this is a multiplication
630538fd1498Szrj or floor division, by a power of two, so we can treat it that
630638fd1498Szrj way unless the multiplier or divisor overflows. Signed
630738fd1498Szrj left-shift overflow is implementation-defined rather than
630838fd1498Szrj undefined in C90, so do not convert signed left shift into
630938fd1498Szrj multiplication. */
631038fd1498Szrj if (TREE_CODE (op1) == INTEGER_CST
631138fd1498Szrj && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
631238fd1498Szrj /* const_binop may not detect overflow correctly,
631338fd1498Szrj so check for it explicitly here. */
631438fd1498Szrj && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
631538fd1498Szrj wi::to_wide (op1))
631638fd1498Szrj && (t1 = fold_convert (ctype,
631738fd1498Szrj const_binop (LSHIFT_EXPR, size_one_node,
631838fd1498Szrj op1))) != 0
631938fd1498Szrj && !TREE_OVERFLOW (t1))
632038fd1498Szrj return extract_muldiv (build2 (tcode == LSHIFT_EXPR
632138fd1498Szrj ? MULT_EXPR : FLOOR_DIV_EXPR,
632238fd1498Szrj ctype,
632338fd1498Szrj fold_convert (ctype, op0),
632438fd1498Szrj t1),
632538fd1498Szrj c, code, wide_type, strict_overflow_p);
632638fd1498Szrj break;
632738fd1498Szrj
632838fd1498Szrj case PLUS_EXPR: case MINUS_EXPR:
632938fd1498Szrj /* See if we can eliminate the operation on both sides. If we can, we
633038fd1498Szrj can return a new PLUS or MINUS. If we can't, the only remaining
633138fd1498Szrj cases where we can do anything are if the second operand is a
633238fd1498Szrj constant. */
633338fd1498Szrj sub_strict_overflow_p = false;
633438fd1498Szrj t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
633538fd1498Szrj t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
633638fd1498Szrj if (t1 != 0 && t2 != 0
633738fd1498Szrj && TYPE_OVERFLOW_WRAPS (ctype)
633838fd1498Szrj && (code == MULT_EXPR
633938fd1498Szrj /* If not multiplication, we can only do this if both operands
634038fd1498Szrj are divisible by c. */
634138fd1498Szrj || (multiple_of_p (ctype, op0, c)
634238fd1498Szrj && multiple_of_p (ctype, op1, c))))
634338fd1498Szrj {
634438fd1498Szrj if (sub_strict_overflow_p)
634538fd1498Szrj *strict_overflow_p = true;
634638fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
634738fd1498Szrj fold_convert (ctype, t2));
634838fd1498Szrj }
634938fd1498Szrj
635038fd1498Szrj /* If this was a subtraction, negate OP1 and set it to be an addition.
635138fd1498Szrj This simplifies the logic below. */
635238fd1498Szrj if (tcode == MINUS_EXPR)
635338fd1498Szrj {
635438fd1498Szrj tcode = PLUS_EXPR, op1 = negate_expr (op1);
635538fd1498Szrj /* If OP1 was not easily negatable, the constant may be OP0. */
635638fd1498Szrj if (TREE_CODE (op0) == INTEGER_CST)
635738fd1498Szrj {
635838fd1498Szrj std::swap (op0, op1);
635938fd1498Szrj std::swap (t1, t2);
636038fd1498Szrj }
636138fd1498Szrj }
636238fd1498Szrj
636338fd1498Szrj if (TREE_CODE (op1) != INTEGER_CST)
636438fd1498Szrj break;
636538fd1498Szrj
636638fd1498Szrj /* If either OP1 or C are negative, this optimization is not safe for
636738fd1498Szrj some of the division and remainder types while for others we need
636838fd1498Szrj to change the code. */
636938fd1498Szrj if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
637038fd1498Szrj {
637138fd1498Szrj if (code == CEIL_DIV_EXPR)
637238fd1498Szrj code = FLOOR_DIV_EXPR;
637338fd1498Szrj else if (code == FLOOR_DIV_EXPR)
637438fd1498Szrj code = CEIL_DIV_EXPR;
637538fd1498Szrj else if (code != MULT_EXPR
637638fd1498Szrj && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
637738fd1498Szrj break;
637838fd1498Szrj }
637938fd1498Szrj
638038fd1498Szrj /* If it's a multiply or a division/modulus operation of a multiple
638138fd1498Szrj of our constant, do the operation and verify it doesn't overflow. */
638238fd1498Szrj if (code == MULT_EXPR
638338fd1498Szrj || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
638438fd1498Szrj TYPE_SIGN (type)))
638538fd1498Szrj {
638638fd1498Szrj op1 = const_binop (code, fold_convert (ctype, op1),
638738fd1498Szrj fold_convert (ctype, c));
638838fd1498Szrj /* We allow the constant to overflow with wrapping semantics. */
638938fd1498Szrj if (op1 == 0
639038fd1498Szrj || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
639138fd1498Szrj break;
639238fd1498Szrj }
639338fd1498Szrj else
639438fd1498Szrj break;
639538fd1498Szrj
639638fd1498Szrj /* If we have an unsigned type, we cannot widen the operation since it
639738fd1498Szrj will change the result if the original computation overflowed. */
639838fd1498Szrj if (TYPE_UNSIGNED (ctype) && ctype != type)
639938fd1498Szrj break;
640038fd1498Szrj
640138fd1498Szrj /* The last case is if we are a multiply. In that case, we can
640238fd1498Szrj apply the distributive law to commute the multiply and addition
640338fd1498Szrj if the multiplication of the constants doesn't overflow
640438fd1498Szrj and overflow is defined. With undefined overflow
640538fd1498Szrj op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
640638fd1498Szrj if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
640738fd1498Szrj return fold_build2 (tcode, ctype,
640838fd1498Szrj fold_build2 (code, ctype,
640938fd1498Szrj fold_convert (ctype, op0),
641038fd1498Szrj fold_convert (ctype, c)),
641138fd1498Szrj op1);
641238fd1498Szrj
641338fd1498Szrj break;
641438fd1498Szrj
641538fd1498Szrj case MULT_EXPR:
641638fd1498Szrj /* We have a special case here if we are doing something like
641738fd1498Szrj (C * 8) % 4 since we know that's zero. */
641838fd1498Szrj if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
641938fd1498Szrj || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
642038fd1498Szrj /* If the multiplication can overflow we cannot optimize this. */
642138fd1498Szrj && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
642238fd1498Szrj && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
642338fd1498Szrj && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
642438fd1498Szrj TYPE_SIGN (type)))
642538fd1498Szrj {
642638fd1498Szrj *strict_overflow_p = true;
642738fd1498Szrj return omit_one_operand (type, integer_zero_node, op0);
642838fd1498Szrj }
642938fd1498Szrj
643038fd1498Szrj /* ... fall through ... */
643138fd1498Szrj
643238fd1498Szrj case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
643338fd1498Szrj case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
643438fd1498Szrj /* If we can extract our operation from the LHS, do so and return a
643538fd1498Szrj new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
643638fd1498Szrj do something only if the second operand is a constant. */
643738fd1498Szrj if (same_p
643838fd1498Szrj && TYPE_OVERFLOW_WRAPS (ctype)
643938fd1498Szrj && (t1 = extract_muldiv (op0, c, code, wide_type,
644038fd1498Szrj strict_overflow_p)) != 0)
644138fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
644238fd1498Szrj fold_convert (ctype, op1));
644338fd1498Szrj else if (tcode == MULT_EXPR && code == MULT_EXPR
644438fd1498Szrj && TYPE_OVERFLOW_WRAPS (ctype)
644538fd1498Szrj && (t1 = extract_muldiv (op1, c, code, wide_type,
644638fd1498Szrj strict_overflow_p)) != 0)
644738fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
644838fd1498Szrj fold_convert (ctype, t1));
644938fd1498Szrj else if (TREE_CODE (op1) != INTEGER_CST)
645038fd1498Szrj return 0;
645138fd1498Szrj
645238fd1498Szrj /* If these are the same operation types, we can associate them
645338fd1498Szrj assuming no overflow. */
645438fd1498Szrj if (tcode == code)
645538fd1498Szrj {
645638fd1498Szrj bool overflow_p = false;
645738fd1498Szrj bool overflow_mul_p;
645838fd1498Szrj signop sign = TYPE_SIGN (ctype);
645938fd1498Szrj unsigned prec = TYPE_PRECISION (ctype);
646038fd1498Szrj wide_int mul = wi::mul (wi::to_wide (op1, prec),
646138fd1498Szrj wi::to_wide (c, prec),
646238fd1498Szrj sign, &overflow_mul_p);
646338fd1498Szrj overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
646438fd1498Szrj if (overflow_mul_p
646538fd1498Szrj && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
646638fd1498Szrj overflow_p = true;
646738fd1498Szrj if (!overflow_p)
646838fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
646938fd1498Szrj wide_int_to_tree (ctype, mul));
647038fd1498Szrj }
647138fd1498Szrj
647238fd1498Szrj /* If these operations "cancel" each other, we have the main
647338fd1498Szrj optimizations of this pass, which occur when either constant is a
647438fd1498Szrj multiple of the other, in which case we replace this with either an
647538fd1498Szrj operation or CODE or TCODE.
647638fd1498Szrj
647738fd1498Szrj If we have an unsigned type, we cannot do this since it will change
647838fd1498Szrj the result if the original computation overflowed. */
647938fd1498Szrj if (TYPE_OVERFLOW_UNDEFINED (ctype)
648038fd1498Szrj && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
648138fd1498Szrj || (tcode == MULT_EXPR
648238fd1498Szrj && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
648338fd1498Szrj && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
648438fd1498Szrj && code != MULT_EXPR)))
648538fd1498Szrj {
648638fd1498Szrj if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
648738fd1498Szrj TYPE_SIGN (type)))
648838fd1498Szrj {
648938fd1498Szrj if (TYPE_OVERFLOW_UNDEFINED (ctype))
649038fd1498Szrj *strict_overflow_p = true;
649138fd1498Szrj return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
649238fd1498Szrj fold_convert (ctype,
649338fd1498Szrj const_binop (TRUNC_DIV_EXPR,
649438fd1498Szrj op1, c)));
649538fd1498Szrj }
649638fd1498Szrj else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
649738fd1498Szrj TYPE_SIGN (type)))
649838fd1498Szrj {
649938fd1498Szrj if (TYPE_OVERFLOW_UNDEFINED (ctype))
650038fd1498Szrj *strict_overflow_p = true;
650138fd1498Szrj return fold_build2 (code, ctype, fold_convert (ctype, op0),
650238fd1498Szrj fold_convert (ctype,
650338fd1498Szrj const_binop (TRUNC_DIV_EXPR,
650438fd1498Szrj c, op1)));
650538fd1498Szrj }
650638fd1498Szrj }
650738fd1498Szrj break;
650838fd1498Szrj
650938fd1498Szrj default:
651038fd1498Szrj break;
651138fd1498Szrj }
651238fd1498Szrj
651338fd1498Szrj return 0;
651438fd1498Szrj }
651538fd1498Szrj
651638fd1498Szrj /* Return a node which has the indicated constant VALUE (either 0 or
651738fd1498Szrj 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
651838fd1498Szrj and is of the indicated TYPE. */
651938fd1498Szrj
652038fd1498Szrj tree
constant_boolean_node(bool value,tree type)652138fd1498Szrj constant_boolean_node (bool value, tree type)
652238fd1498Szrj {
652338fd1498Szrj if (type == integer_type_node)
652438fd1498Szrj return value ? integer_one_node : integer_zero_node;
652538fd1498Szrj else if (type == boolean_type_node)
652638fd1498Szrj return value ? boolean_true_node : boolean_false_node;
652738fd1498Szrj else if (TREE_CODE (type) == VECTOR_TYPE)
652838fd1498Szrj return build_vector_from_val (type,
652938fd1498Szrj build_int_cst (TREE_TYPE (type),
653038fd1498Szrj value ? -1 : 0));
653138fd1498Szrj else
653238fd1498Szrj return fold_convert (type, value ? integer_one_node : integer_zero_node);
653338fd1498Szrj }
653438fd1498Szrj
653538fd1498Szrj
653638fd1498Szrj /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
653738fd1498Szrj Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
653838fd1498Szrj CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
653938fd1498Szrj expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
654038fd1498Szrj COND is the first argument to CODE; otherwise (as in the example
654138fd1498Szrj given here), it is the second argument. TYPE is the type of the
654238fd1498Szrj original expression. Return NULL_TREE if no simplification is
654338fd1498Szrj possible. */
654438fd1498Szrj
654538fd1498Szrj static tree
fold_binary_op_with_conditional_arg(location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree cond,tree arg,int cond_first_p)654638fd1498Szrj fold_binary_op_with_conditional_arg (location_t loc,
654738fd1498Szrj enum tree_code code,
654838fd1498Szrj tree type, tree op0, tree op1,
654938fd1498Szrj tree cond, tree arg, int cond_first_p)
655038fd1498Szrj {
655138fd1498Szrj tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
655238fd1498Szrj tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
655338fd1498Szrj tree test, true_value, false_value;
655438fd1498Szrj tree lhs = NULL_TREE;
655538fd1498Szrj tree rhs = NULL_TREE;
655638fd1498Szrj enum tree_code cond_code = COND_EXPR;
655738fd1498Szrj
655838fd1498Szrj if (TREE_CODE (cond) == COND_EXPR
655938fd1498Szrj || TREE_CODE (cond) == VEC_COND_EXPR)
656038fd1498Szrj {
656138fd1498Szrj test = TREE_OPERAND (cond, 0);
656238fd1498Szrj true_value = TREE_OPERAND (cond, 1);
656338fd1498Szrj false_value = TREE_OPERAND (cond, 2);
656438fd1498Szrj /* If this operand throws an expression, then it does not make
656538fd1498Szrj sense to try to perform a logical or arithmetic operation
656638fd1498Szrj involving it. */
656738fd1498Szrj if (VOID_TYPE_P (TREE_TYPE (true_value)))
656838fd1498Szrj lhs = true_value;
656938fd1498Szrj if (VOID_TYPE_P (TREE_TYPE (false_value)))
657038fd1498Szrj rhs = false_value;
657138fd1498Szrj }
657238fd1498Szrj else if (!(TREE_CODE (type) != VECTOR_TYPE
657338fd1498Szrj && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
657438fd1498Szrj {
657538fd1498Szrj tree testtype = TREE_TYPE (cond);
657638fd1498Szrj test = cond;
657738fd1498Szrj true_value = constant_boolean_node (true, testtype);
657838fd1498Szrj false_value = constant_boolean_node (false, testtype);
657938fd1498Szrj }
658038fd1498Szrj else
658138fd1498Szrj /* Detect the case of mixing vector and scalar types - bail out. */
658238fd1498Szrj return NULL_TREE;
658338fd1498Szrj
658438fd1498Szrj if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
658538fd1498Szrj cond_code = VEC_COND_EXPR;
658638fd1498Szrj
658738fd1498Szrj /* This transformation is only worthwhile if we don't have to wrap ARG
658838fd1498Szrj in a SAVE_EXPR and the operation can be simplified without recursing
658938fd1498Szrj on at least one of the branches once its pushed inside the COND_EXPR. */
659038fd1498Szrj if (!TREE_CONSTANT (arg)
659138fd1498Szrj && (TREE_SIDE_EFFECTS (arg)
659238fd1498Szrj || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
659338fd1498Szrj || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
659438fd1498Szrj return NULL_TREE;
659538fd1498Szrj
659638fd1498Szrj arg = fold_convert_loc (loc, arg_type, arg);
659738fd1498Szrj if (lhs == 0)
659838fd1498Szrj {
659938fd1498Szrj true_value = fold_convert_loc (loc, cond_type, true_value);
660038fd1498Szrj if (cond_first_p)
660138fd1498Szrj lhs = fold_build2_loc (loc, code, type, true_value, arg);
660238fd1498Szrj else
660338fd1498Szrj lhs = fold_build2_loc (loc, code, type, arg, true_value);
660438fd1498Szrj }
660538fd1498Szrj if (rhs == 0)
660638fd1498Szrj {
660738fd1498Szrj false_value = fold_convert_loc (loc, cond_type, false_value);
660838fd1498Szrj if (cond_first_p)
660938fd1498Szrj rhs = fold_build2_loc (loc, code, type, false_value, arg);
661038fd1498Szrj else
661138fd1498Szrj rhs = fold_build2_loc (loc, code, type, arg, false_value);
661238fd1498Szrj }
661338fd1498Szrj
661438fd1498Szrj /* Check that we have simplified at least one of the branches. */
661538fd1498Szrj if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
661638fd1498Szrj return NULL_TREE;
661738fd1498Szrj
661838fd1498Szrj return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
661938fd1498Szrj }
662038fd1498Szrj
662138fd1498Szrj
662238fd1498Szrj /* Subroutine of fold() that checks for the addition of +/- 0.0.
662338fd1498Szrj
662438fd1498Szrj If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
662538fd1498Szrj TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
662638fd1498Szrj ADDEND is the same as X.
662738fd1498Szrj
662838fd1498Szrj X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
662938fd1498Szrj and finite. The problematic cases are when X is zero, and its mode
663038fd1498Szrj has signed zeros. In the case of rounding towards -infinity,
663138fd1498Szrj X - 0 is not the same as X because 0 - 0 is -0. In other rounding
663238fd1498Szrj modes, X + 0 is not the same as X because -0 + 0 is 0. */
663338fd1498Szrj
663438fd1498Szrj bool
fold_real_zero_addition_p(const_tree type,const_tree addend,int negate)663538fd1498Szrj fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
663638fd1498Szrj {
663738fd1498Szrj if (!real_zerop (addend))
663838fd1498Szrj return false;
663938fd1498Szrj
664038fd1498Szrj /* Don't allow the fold with -fsignaling-nans. */
664138fd1498Szrj if (HONOR_SNANS (element_mode (type)))
664238fd1498Szrj return false;
664338fd1498Szrj
664438fd1498Szrj /* Allow the fold if zeros aren't signed, or their sign isn't important. */
664538fd1498Szrj if (!HONOR_SIGNED_ZEROS (element_mode (type)))
664638fd1498Szrj return true;
664738fd1498Szrj
664838fd1498Szrj /* In a vector or complex, we would need to check the sign of all zeros. */
664938fd1498Szrj if (TREE_CODE (addend) != REAL_CST)
665038fd1498Szrj return false;
665138fd1498Szrj
665238fd1498Szrj /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
665338fd1498Szrj if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
665438fd1498Szrj negate = !negate;
665538fd1498Szrj
665638fd1498Szrj /* The mode has signed zeros, and we have to honor their sign.
665738fd1498Szrj In this situation, there is only one case we can return true for.
665838fd1498Szrj X - 0 is the same as X unless rounding towards -infinity is
665938fd1498Szrj supported. */
666038fd1498Szrj return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
666138fd1498Szrj }
666238fd1498Szrj
666338fd1498Szrj /* Subroutine of match.pd that optimizes comparisons of a division by
666438fd1498Szrj a nonzero integer constant against an integer constant, i.e.
666538fd1498Szrj X/C1 op C2.
666638fd1498Szrj
666738fd1498Szrj CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
666838fd1498Szrj GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
666938fd1498Szrj
667038fd1498Szrj enum tree_code
fold_div_compare(enum tree_code code,tree c1,tree c2,tree * lo,tree * hi,bool * neg_overflow)667138fd1498Szrj fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
667238fd1498Szrj tree *hi, bool *neg_overflow)
667338fd1498Szrj {
667438fd1498Szrj tree prod, tmp, type = TREE_TYPE (c1);
667538fd1498Szrj signop sign = TYPE_SIGN (type);
667638fd1498Szrj bool overflow;
667738fd1498Szrj
667838fd1498Szrj /* We have to do this the hard way to detect unsigned overflow.
667938fd1498Szrj prod = int_const_binop (MULT_EXPR, c1, c2); */
668038fd1498Szrj wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
668138fd1498Szrj prod = force_fit_type (type, val, -1, overflow);
668238fd1498Szrj *neg_overflow = false;
668338fd1498Szrj
668438fd1498Szrj if (sign == UNSIGNED)
668538fd1498Szrj {
668638fd1498Szrj tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
668738fd1498Szrj *lo = prod;
668838fd1498Szrj
668938fd1498Szrj /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
669038fd1498Szrj val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
669138fd1498Szrj *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
669238fd1498Szrj }
669338fd1498Szrj else if (tree_int_cst_sgn (c1) >= 0)
669438fd1498Szrj {
669538fd1498Szrj tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
669638fd1498Szrj switch (tree_int_cst_sgn (c2))
669738fd1498Szrj {
669838fd1498Szrj case -1:
669938fd1498Szrj *neg_overflow = true;
670038fd1498Szrj *lo = int_const_binop (MINUS_EXPR, prod, tmp);
670138fd1498Szrj *hi = prod;
670238fd1498Szrj break;
670338fd1498Szrj
670438fd1498Szrj case 0:
670538fd1498Szrj *lo = fold_negate_const (tmp, type);
670638fd1498Szrj *hi = tmp;
670738fd1498Szrj break;
670838fd1498Szrj
670938fd1498Szrj case 1:
671038fd1498Szrj *hi = int_const_binop (PLUS_EXPR, prod, tmp);
671138fd1498Szrj *lo = prod;
671238fd1498Szrj break;
671338fd1498Szrj
671438fd1498Szrj default:
671538fd1498Szrj gcc_unreachable ();
671638fd1498Szrj }
671738fd1498Szrj }
671838fd1498Szrj else
671938fd1498Szrj {
672038fd1498Szrj /* A negative divisor reverses the relational operators. */
672138fd1498Szrj code = swap_tree_comparison (code);
672238fd1498Szrj
672338fd1498Szrj tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
672438fd1498Szrj switch (tree_int_cst_sgn (c2))
672538fd1498Szrj {
672638fd1498Szrj case -1:
672738fd1498Szrj *hi = int_const_binop (MINUS_EXPR, prod, tmp);
672838fd1498Szrj *lo = prod;
672938fd1498Szrj break;
673038fd1498Szrj
673138fd1498Szrj case 0:
673238fd1498Szrj *hi = fold_negate_const (tmp, type);
673338fd1498Szrj *lo = tmp;
673438fd1498Szrj break;
673538fd1498Szrj
673638fd1498Szrj case 1:
673738fd1498Szrj *neg_overflow = true;
673838fd1498Szrj *lo = int_const_binop (PLUS_EXPR, prod, tmp);
673938fd1498Szrj *hi = prod;
674038fd1498Szrj break;
674138fd1498Szrj
674238fd1498Szrj default:
674338fd1498Szrj gcc_unreachable ();
674438fd1498Szrj }
674538fd1498Szrj }
674638fd1498Szrj
674738fd1498Szrj if (code != EQ_EXPR && code != NE_EXPR)
674838fd1498Szrj return code;
674938fd1498Szrj
675038fd1498Szrj if (TREE_OVERFLOW (*lo)
675138fd1498Szrj || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
675238fd1498Szrj *lo = NULL_TREE;
675338fd1498Szrj if (TREE_OVERFLOW (*hi)
675438fd1498Szrj || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
675538fd1498Szrj *hi = NULL_TREE;
675638fd1498Szrj
675738fd1498Szrj return code;
675838fd1498Szrj }
675938fd1498Szrj
676038fd1498Szrj
676138fd1498Szrj /* If CODE with arguments ARG0 and ARG1 represents a single bit
676238fd1498Szrj equality/inequality test, then return a simplified form of the test
676338fd1498Szrj using a sign testing. Otherwise return NULL. TYPE is the desired
676438fd1498Szrj result type. */
676538fd1498Szrj
676638fd1498Szrj static tree
fold_single_bit_test_into_sign_test(location_t loc,enum tree_code code,tree arg0,tree arg1,tree result_type)676738fd1498Szrj fold_single_bit_test_into_sign_test (location_t loc,
676838fd1498Szrj enum tree_code code, tree arg0, tree arg1,
676938fd1498Szrj tree result_type)
677038fd1498Szrj {
677138fd1498Szrj /* If this is testing a single bit, we can optimize the test. */
677238fd1498Szrj if ((code == NE_EXPR || code == EQ_EXPR)
677338fd1498Szrj && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
677438fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1)))
677538fd1498Szrj {
677638fd1498Szrj /* If we have (A & C) != 0 where C is the sign bit of A, convert
677738fd1498Szrj this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
677838fd1498Szrj tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
677938fd1498Szrj
678038fd1498Szrj if (arg00 != NULL_TREE
678138fd1498Szrj /* This is only a win if casting to a signed type is cheap,
678238fd1498Szrj i.e. when arg00's type is not a partial mode. */
678338fd1498Szrj && type_has_mode_precision_p (TREE_TYPE (arg00)))
678438fd1498Szrj {
678538fd1498Szrj tree stype = signed_type_for (TREE_TYPE (arg00));
678638fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
678738fd1498Szrj result_type,
678838fd1498Szrj fold_convert_loc (loc, stype, arg00),
678938fd1498Szrj build_int_cst (stype, 0));
679038fd1498Szrj }
679138fd1498Szrj }
679238fd1498Szrj
679338fd1498Szrj return NULL_TREE;
679438fd1498Szrj }
679538fd1498Szrj
679638fd1498Szrj /* If CODE with arguments ARG0 and ARG1 represents a single bit
679738fd1498Szrj equality/inequality test, then return a simplified form of
679838fd1498Szrj the test using shifts and logical operations. Otherwise return
679938fd1498Szrj NULL. TYPE is the desired result type. */
680038fd1498Szrj
680138fd1498Szrj tree
fold_single_bit_test(location_t loc,enum tree_code code,tree arg0,tree arg1,tree result_type)680238fd1498Szrj fold_single_bit_test (location_t loc, enum tree_code code,
680338fd1498Szrj tree arg0, tree arg1, tree result_type)
680438fd1498Szrj {
680538fd1498Szrj /* If this is testing a single bit, we can optimize the test. */
680638fd1498Szrj if ((code == NE_EXPR || code == EQ_EXPR)
680738fd1498Szrj && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
680838fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1)))
680938fd1498Szrj {
681038fd1498Szrj tree inner = TREE_OPERAND (arg0, 0);
681138fd1498Szrj tree type = TREE_TYPE (arg0);
681238fd1498Szrj int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
681338fd1498Szrj scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
681438fd1498Szrj int ops_unsigned;
681538fd1498Szrj tree signed_type, unsigned_type, intermediate_type;
681638fd1498Szrj tree tem, one;
681738fd1498Szrj
681838fd1498Szrj /* First, see if we can fold the single bit test into a sign-bit
681938fd1498Szrj test. */
682038fd1498Szrj tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
682138fd1498Szrj result_type);
682238fd1498Szrj if (tem)
682338fd1498Szrj return tem;
682438fd1498Szrj
682538fd1498Szrj /* Otherwise we have (A & C) != 0 where C is a single bit,
682638fd1498Szrj convert that into ((A >> C2) & 1). Where C2 = log2(C).
682738fd1498Szrj Similarly for (A & C) == 0. */
682838fd1498Szrj
682938fd1498Szrj /* If INNER is a right shift of a constant and it plus BITNUM does
683038fd1498Szrj not overflow, adjust BITNUM and INNER. */
683138fd1498Szrj if (TREE_CODE (inner) == RSHIFT_EXPR
683238fd1498Szrj && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
683338fd1498Szrj && bitnum < TYPE_PRECISION (type)
683438fd1498Szrj && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
683538fd1498Szrj TYPE_PRECISION (type) - bitnum))
683638fd1498Szrj {
683738fd1498Szrj bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
683838fd1498Szrj inner = TREE_OPERAND (inner, 0);
683938fd1498Szrj }
684038fd1498Szrj
684138fd1498Szrj /* If we are going to be able to omit the AND below, we must do our
684238fd1498Szrj operations as unsigned. If we must use the AND, we have a choice.
684338fd1498Szrj Normally unsigned is faster, but for some machines signed is. */
684438fd1498Szrj ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
684538fd1498Szrj && !flag_syntax_only) ? 0 : 1;
684638fd1498Szrj
684738fd1498Szrj signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
684838fd1498Szrj unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
684938fd1498Szrj intermediate_type = ops_unsigned ? unsigned_type : signed_type;
685038fd1498Szrj inner = fold_convert_loc (loc, intermediate_type, inner);
685138fd1498Szrj
685238fd1498Szrj if (bitnum != 0)
685338fd1498Szrj inner = build2 (RSHIFT_EXPR, intermediate_type,
685438fd1498Szrj inner, size_int (bitnum));
685538fd1498Szrj
685638fd1498Szrj one = build_int_cst (intermediate_type, 1);
685738fd1498Szrj
685838fd1498Szrj if (code == EQ_EXPR)
685938fd1498Szrj inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
686038fd1498Szrj
686138fd1498Szrj /* Put the AND last so it can combine with more things. */
686238fd1498Szrj inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
686338fd1498Szrj
686438fd1498Szrj /* Make sure to return the proper type. */
686538fd1498Szrj inner = fold_convert_loc (loc, result_type, inner);
686638fd1498Szrj
686738fd1498Szrj return inner;
686838fd1498Szrj }
686938fd1498Szrj return NULL_TREE;
687038fd1498Szrj }
687138fd1498Szrj
687238fd1498Szrj /* Test whether it is preferable two swap two operands, ARG0 and
687338fd1498Szrj ARG1, for example because ARG0 is an integer constant and ARG1
687438fd1498Szrj isn't. */
687538fd1498Szrj
687638fd1498Szrj bool
tree_swap_operands_p(const_tree arg0,const_tree arg1)687738fd1498Szrj tree_swap_operands_p (const_tree arg0, const_tree arg1)
687838fd1498Szrj {
687938fd1498Szrj if (CONSTANT_CLASS_P (arg1))
688038fd1498Szrj return 0;
688138fd1498Szrj if (CONSTANT_CLASS_P (arg0))
688238fd1498Szrj return 1;
688338fd1498Szrj
688438fd1498Szrj STRIP_NOPS (arg0);
688538fd1498Szrj STRIP_NOPS (arg1);
688638fd1498Szrj
688738fd1498Szrj if (TREE_CONSTANT (arg1))
688838fd1498Szrj return 0;
688938fd1498Szrj if (TREE_CONSTANT (arg0))
689038fd1498Szrj return 1;
689138fd1498Szrj
689238fd1498Szrj /* It is preferable to swap two SSA_NAME to ensure a canonical form
689338fd1498Szrj for commutative and comparison operators. Ensuring a canonical
689438fd1498Szrj form allows the optimizers to find additional redundancies without
689538fd1498Szrj having to explicitly check for both orderings. */
689638fd1498Szrj if (TREE_CODE (arg0) == SSA_NAME
689738fd1498Szrj && TREE_CODE (arg1) == SSA_NAME
689838fd1498Szrj && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
689938fd1498Szrj return 1;
690038fd1498Szrj
690138fd1498Szrj /* Put SSA_NAMEs last. */
690238fd1498Szrj if (TREE_CODE (arg1) == SSA_NAME)
690338fd1498Szrj return 0;
690438fd1498Szrj if (TREE_CODE (arg0) == SSA_NAME)
690538fd1498Szrj return 1;
690638fd1498Szrj
690738fd1498Szrj /* Put variables last. */
690838fd1498Szrj if (DECL_P (arg1))
690938fd1498Szrj return 0;
691038fd1498Szrj if (DECL_P (arg0))
691138fd1498Szrj return 1;
691238fd1498Szrj
691338fd1498Szrj return 0;
691438fd1498Szrj }
691538fd1498Szrj
691638fd1498Szrj
691738fd1498Szrj /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
691838fd1498Szrj means A >= Y && A != MAX, but in this case we know that
691938fd1498Szrj A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
692038fd1498Szrj
692138fd1498Szrj static tree
fold_to_nonsharp_ineq_using_bound(location_t loc,tree ineq,tree bound)692238fd1498Szrj fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
692338fd1498Szrj {
692438fd1498Szrj tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
692538fd1498Szrj
692638fd1498Szrj if (TREE_CODE (bound) == LT_EXPR)
692738fd1498Szrj a = TREE_OPERAND (bound, 0);
692838fd1498Szrj else if (TREE_CODE (bound) == GT_EXPR)
692938fd1498Szrj a = TREE_OPERAND (bound, 1);
693038fd1498Szrj else
693138fd1498Szrj return NULL_TREE;
693238fd1498Szrj
693338fd1498Szrj typea = TREE_TYPE (a);
693438fd1498Szrj if (!INTEGRAL_TYPE_P (typea)
693538fd1498Szrj && !POINTER_TYPE_P (typea))
693638fd1498Szrj return NULL_TREE;
693738fd1498Szrj
693838fd1498Szrj if (TREE_CODE (ineq) == LT_EXPR)
693938fd1498Szrj {
694038fd1498Szrj a1 = TREE_OPERAND (ineq, 1);
694138fd1498Szrj y = TREE_OPERAND (ineq, 0);
694238fd1498Szrj }
694338fd1498Szrj else if (TREE_CODE (ineq) == GT_EXPR)
694438fd1498Szrj {
694538fd1498Szrj a1 = TREE_OPERAND (ineq, 0);
694638fd1498Szrj y = TREE_OPERAND (ineq, 1);
694738fd1498Szrj }
694838fd1498Szrj else
694938fd1498Szrj return NULL_TREE;
695038fd1498Szrj
695138fd1498Szrj if (TREE_TYPE (a1) != typea)
695238fd1498Szrj return NULL_TREE;
695338fd1498Szrj
695438fd1498Szrj if (POINTER_TYPE_P (typea))
695538fd1498Szrj {
695638fd1498Szrj /* Convert the pointer types into integer before taking the difference. */
695738fd1498Szrj tree ta = fold_convert_loc (loc, ssizetype, a);
695838fd1498Szrj tree ta1 = fold_convert_loc (loc, ssizetype, a1);
695938fd1498Szrj diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
696038fd1498Szrj }
696138fd1498Szrj else
696238fd1498Szrj diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
696338fd1498Szrj
696438fd1498Szrj if (!diff || !integer_onep (diff))
696538fd1498Szrj return NULL_TREE;
696638fd1498Szrj
696738fd1498Szrj return fold_build2_loc (loc, GE_EXPR, type, a, y);
696838fd1498Szrj }
696938fd1498Szrj
697038fd1498Szrj /* Fold a sum or difference of at least one multiplication.
697138fd1498Szrj Returns the folded tree or NULL if no simplification could be made. */
697238fd1498Szrj
697338fd1498Szrj static tree
fold_plusminus_mult_expr(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)697438fd1498Szrj fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
697538fd1498Szrj tree arg0, tree arg1)
697638fd1498Szrj {
697738fd1498Szrj tree arg00, arg01, arg10, arg11;
697838fd1498Szrj tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
697938fd1498Szrj
698038fd1498Szrj /* (A * C) +- (B * C) -> (A+-B) * C.
698138fd1498Szrj (A * C) +- A -> A * (C+-1).
698238fd1498Szrj We are most concerned about the case where C is a constant,
698338fd1498Szrj but other combinations show up during loop reduction. Since
698438fd1498Szrj it is not difficult, try all four possibilities. */
698538fd1498Szrj
698638fd1498Szrj if (TREE_CODE (arg0) == MULT_EXPR)
698738fd1498Szrj {
698838fd1498Szrj arg00 = TREE_OPERAND (arg0, 0);
698938fd1498Szrj arg01 = TREE_OPERAND (arg0, 1);
699038fd1498Szrj }
699138fd1498Szrj else if (TREE_CODE (arg0) == INTEGER_CST)
699238fd1498Szrj {
699338fd1498Szrj arg00 = build_one_cst (type);
699438fd1498Szrj arg01 = arg0;
699538fd1498Szrj }
699638fd1498Szrj else
699738fd1498Szrj {
699838fd1498Szrj /* We cannot generate constant 1 for fract. */
699938fd1498Szrj if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
700038fd1498Szrj return NULL_TREE;
700138fd1498Szrj arg00 = arg0;
700238fd1498Szrj arg01 = build_one_cst (type);
700338fd1498Szrj }
700438fd1498Szrj if (TREE_CODE (arg1) == MULT_EXPR)
700538fd1498Szrj {
700638fd1498Szrj arg10 = TREE_OPERAND (arg1, 0);
700738fd1498Szrj arg11 = TREE_OPERAND (arg1, 1);
700838fd1498Szrj }
700938fd1498Szrj else if (TREE_CODE (arg1) == INTEGER_CST)
701038fd1498Szrj {
701138fd1498Szrj arg10 = build_one_cst (type);
701238fd1498Szrj /* As we canonicalize A - 2 to A + -2 get rid of that sign for
701338fd1498Szrj the purpose of this canonicalization. */
701438fd1498Szrj if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
701538fd1498Szrj && negate_expr_p (arg1)
701638fd1498Szrj && code == PLUS_EXPR)
701738fd1498Szrj {
701838fd1498Szrj arg11 = negate_expr (arg1);
701938fd1498Szrj code = MINUS_EXPR;
702038fd1498Szrj }
702138fd1498Szrj else
702238fd1498Szrj arg11 = arg1;
702338fd1498Szrj }
702438fd1498Szrj else
702538fd1498Szrj {
702638fd1498Szrj /* We cannot generate constant 1 for fract. */
702738fd1498Szrj if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
702838fd1498Szrj return NULL_TREE;
702938fd1498Szrj arg10 = arg1;
703038fd1498Szrj arg11 = build_one_cst (type);
703138fd1498Szrj }
703238fd1498Szrj same = NULL_TREE;
703338fd1498Szrj
703438fd1498Szrj /* Prefer factoring a common non-constant. */
703538fd1498Szrj if (operand_equal_p (arg00, arg10, 0))
703638fd1498Szrj same = arg00, alt0 = arg01, alt1 = arg11;
703738fd1498Szrj else if (operand_equal_p (arg01, arg11, 0))
703838fd1498Szrj same = arg01, alt0 = arg00, alt1 = arg10;
703938fd1498Szrj else if (operand_equal_p (arg00, arg11, 0))
704038fd1498Szrj same = arg00, alt0 = arg01, alt1 = arg10;
704138fd1498Szrj else if (operand_equal_p (arg01, arg10, 0))
704238fd1498Szrj same = arg01, alt0 = arg00, alt1 = arg11;
704338fd1498Szrj
704438fd1498Szrj /* No identical multiplicands; see if we can find a common
704538fd1498Szrj power-of-two factor in non-power-of-two multiplies. This
704638fd1498Szrj can help in multi-dimensional array access. */
704738fd1498Szrj else if (tree_fits_shwi_p (arg01)
704838fd1498Szrj && tree_fits_shwi_p (arg11))
704938fd1498Szrj {
705038fd1498Szrj HOST_WIDE_INT int01, int11, tmp;
705138fd1498Szrj bool swap = false;
705238fd1498Szrj tree maybe_same;
705338fd1498Szrj int01 = tree_to_shwi (arg01);
705438fd1498Szrj int11 = tree_to_shwi (arg11);
705538fd1498Szrj
705638fd1498Szrj /* Move min of absolute values to int11. */
705738fd1498Szrj if (absu_hwi (int01) < absu_hwi (int11))
705838fd1498Szrj {
705938fd1498Szrj tmp = int01, int01 = int11, int11 = tmp;
706038fd1498Szrj alt0 = arg00, arg00 = arg10, arg10 = alt0;
706138fd1498Szrj maybe_same = arg01;
706238fd1498Szrj swap = true;
706338fd1498Szrj }
706438fd1498Szrj else
706538fd1498Szrj maybe_same = arg11;
706638fd1498Szrj
706738fd1498Szrj if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
706838fd1498Szrj /* The remainder should not be a constant, otherwise we
706938fd1498Szrj end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
707038fd1498Szrj increased the number of multiplications necessary. */
707138fd1498Szrj && TREE_CODE (arg10) != INTEGER_CST)
707238fd1498Szrj {
707338fd1498Szrj alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
707438fd1498Szrj build_int_cst (TREE_TYPE (arg00),
707538fd1498Szrj int01 / int11));
707638fd1498Szrj alt1 = arg10;
707738fd1498Szrj same = maybe_same;
707838fd1498Szrj if (swap)
707938fd1498Szrj maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
708038fd1498Szrj }
708138fd1498Szrj }
708238fd1498Szrj
708338fd1498Szrj if (!same)
708438fd1498Szrj return NULL_TREE;
708538fd1498Szrj
708638fd1498Szrj if (! INTEGRAL_TYPE_P (type)
708738fd1498Szrj || TYPE_OVERFLOW_WRAPS (type)
708838fd1498Szrj /* We are neither factoring zero nor minus one. */
708938fd1498Szrj || TREE_CODE (same) == INTEGER_CST)
709038fd1498Szrj return fold_build2_loc (loc, MULT_EXPR, type,
709138fd1498Szrj fold_build2_loc (loc, code, type,
709238fd1498Szrj fold_convert_loc (loc, type, alt0),
709338fd1498Szrj fold_convert_loc (loc, type, alt1)),
709438fd1498Szrj fold_convert_loc (loc, type, same));
709538fd1498Szrj
709638fd1498Szrj /* Same may be zero and thus the operation 'code' may overflow. Likewise
709738fd1498Szrj same may be minus one and thus the multiplication may overflow. Perform
709838fd1498Szrj the sum operation in an unsigned type. */
709938fd1498Szrj tree utype = unsigned_type_for (type);
710038fd1498Szrj tree tem = fold_build2_loc (loc, code, utype,
710138fd1498Szrj fold_convert_loc (loc, utype, alt0),
710238fd1498Szrj fold_convert_loc (loc, utype, alt1));
710338fd1498Szrj /* If the sum evaluated to a constant that is not -INF the multiplication
710438fd1498Szrj cannot overflow. */
710538fd1498Szrj if (TREE_CODE (tem) == INTEGER_CST
710638fd1498Szrj && (wi::to_wide (tem)
710738fd1498Szrj != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
710838fd1498Szrj return fold_build2_loc (loc, MULT_EXPR, type,
710938fd1498Szrj fold_convert (type, tem), same);
711038fd1498Szrj
711138fd1498Szrj /* Do not resort to unsigned multiplication because
711238fd1498Szrj we lose the no-overflow property of the expression. */
711338fd1498Szrj return NULL_TREE;
711438fd1498Szrj }
711538fd1498Szrj
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  /* Size in bytes of the target representation of this integer mode.  */
  int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* OFF == -1 means "encode the whole constant", which requires the
     buffer to hold all of it; a partial encode starting at or beyond
     the end of the object fails.  */
  if ((off == -1 && total_bytes > len) || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;

  if (ptr == NULL)
    /* Dry run.  */
    return MIN (len, total_bytes - off);

  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Map BYTE (a little-endian byte index into the value) to its
	     position in the target representation, honoring both the
	     target word order and the byte order within a word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      /* Only store bytes that fall inside the requested [OFF, OFF+LEN)
	 window of the encoding.  */
      if (offset >= off && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
716538fd1498Szrj
716638fd1498Szrj
716738fd1498Szrj /* Subroutine of native_encode_expr. Encode the FIXED_CST
716838fd1498Szrj specified by EXPR into the buffer PTR of length LEN bytes.
716938fd1498Szrj Return the number of bytes placed in the buffer, or zero
717038fd1498Szrj upon failure. */
717138fd1498Szrj
717238fd1498Szrj static int
native_encode_fixed(const_tree expr,unsigned char * ptr,int len,int off)717338fd1498Szrj native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
717438fd1498Szrj {
717538fd1498Szrj tree type = TREE_TYPE (expr);
717638fd1498Szrj scalar_mode mode = SCALAR_TYPE_MODE (type);
717738fd1498Szrj int total_bytes = GET_MODE_SIZE (mode);
717838fd1498Szrj FIXED_VALUE_TYPE value;
717938fd1498Szrj tree i_value, i_type;
718038fd1498Szrj
718138fd1498Szrj if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
718238fd1498Szrj return 0;
718338fd1498Szrj
718438fd1498Szrj i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
718538fd1498Szrj
718638fd1498Szrj if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
718738fd1498Szrj return 0;
718838fd1498Szrj
718938fd1498Szrj value = TREE_FIXED_CST (expr);
719038fd1498Szrj i_value = double_int_to_tree (i_type, value.data);
719138fd1498Szrj
719238fd1498Szrj return native_encode_int (i_value, ptr, len, off);
719338fd1498Szrj }
719438fd1498Szrj
719538fd1498Szrj
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  /* OFF == -1 requests the whole constant (buffer must fit it);
     otherwise start the encoding at byte OFF.  */
  if ((off == -1 && total_bytes > len) || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;

  if (ptr == NULL)
    /* Dry run.  */
    return MIN (len, total_bytes - off);

  /* Number of target words contained in one 32-bit chunk of TMP.  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* BYTE indexes within the current 32-bit chunk; BITPOS indexes
	 the whole float.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  /* Sub-32-bit words: place BYTE according to word order and
	     byte order within a word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	{
	  offset = byte;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Reverse bytes within each long, or within the entire float
		 if it's smaller than a long (for HFmode).  */
	      offset = MIN (3, total_bytes - 1) - offset;
	      gcc_assert (offset >= 0);
	    }
	}
      /* Rebase OFFSET from chunk-relative to the whole value.  */
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      /* Only store bytes inside the requested window.  */
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
726238fd1498Szrj
726338fd1498Szrj /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
726438fd1498Szrj specified by EXPR into the buffer PTR of length LEN bytes.
726538fd1498Szrj Return the number of bytes placed in the buffer, or zero
726638fd1498Szrj upon failure. */
726738fd1498Szrj
726838fd1498Szrj static int
native_encode_complex(const_tree expr,unsigned char * ptr,int len,int off)726938fd1498Szrj native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
727038fd1498Szrj {
727138fd1498Szrj int rsize, isize;
727238fd1498Szrj tree part;
727338fd1498Szrj
727438fd1498Szrj part = TREE_REALPART (expr);
727538fd1498Szrj rsize = native_encode_expr (part, ptr, len, off);
727638fd1498Szrj if (off == -1 && rsize == 0)
727738fd1498Szrj return 0;
727838fd1498Szrj part = TREE_IMAGPART (expr);
727938fd1498Szrj if (off != -1)
728038fd1498Szrj off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
728138fd1498Szrj isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
728238fd1498Szrj len - rsize, off);
728338fd1498Szrj if (off == -1 && isize != rsize)
728438fd1498Szrj return 0;
728538fd1498Szrj return rsize + isize;
728638fd1498Szrj }
728738fd1498Szrj
728838fd1498Szrj
728938fd1498Szrj /* Subroutine of native_encode_expr. Encode the VECTOR_CST
729038fd1498Szrj specified by EXPR into the buffer PTR of length LEN bytes.
729138fd1498Szrj Return the number of bytes placed in the buffer, or zero
729238fd1498Szrj upon failure. */
729338fd1498Szrj
729438fd1498Szrj static int
native_encode_vector(const_tree expr,unsigned char * ptr,int len,int off)729538fd1498Szrj native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
729638fd1498Szrj {
729738fd1498Szrj unsigned HOST_WIDE_INT i, count;
729838fd1498Szrj int size, offset;
729938fd1498Szrj tree itype, elem;
730038fd1498Szrj
730138fd1498Szrj offset = 0;
730238fd1498Szrj if (!VECTOR_CST_NELTS (expr).is_constant (&count))
730338fd1498Szrj return 0;
730438fd1498Szrj itype = TREE_TYPE (TREE_TYPE (expr));
730538fd1498Szrj size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
730638fd1498Szrj for (i = 0; i < count; i++)
730738fd1498Szrj {
730838fd1498Szrj if (off >= size)
730938fd1498Szrj {
731038fd1498Szrj off -= size;
731138fd1498Szrj continue;
731238fd1498Szrj }
731338fd1498Szrj elem = VECTOR_CST_ELT (expr, i);
731438fd1498Szrj int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
731538fd1498Szrj len - offset, off);
731638fd1498Szrj if ((off == -1 && res != size) || res == 0)
731738fd1498Szrj return 0;
731838fd1498Szrj offset += res;
731938fd1498Szrj if (offset >= len)
732038fd1498Szrj return (off == -1 && i < count - 1) ? 0 : offset;
732138fd1498Szrj if (off != -1)
732238fd1498Szrj off = 0;
732338fd1498Szrj }
732438fd1498Szrj return offset;
732538fd1498Szrj }
732638fd1498Szrj
732738fd1498Szrj
732838fd1498Szrj /* Subroutine of native_encode_expr. Encode the STRING_CST
732938fd1498Szrj specified by EXPR into the buffer PTR of length LEN bytes.
733038fd1498Szrj Return the number of bytes placed in the buffer, or zero
733138fd1498Szrj upon failure. */
733238fd1498Szrj
733338fd1498Szrj static int
native_encode_string(const_tree expr,unsigned char * ptr,int len,int off)733438fd1498Szrj native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
733538fd1498Szrj {
733638fd1498Szrj tree type = TREE_TYPE (expr);
733738fd1498Szrj
733838fd1498Szrj /* Wide-char strings are encoded in target byte-order so native
733938fd1498Szrj encoding them is trivial. */
734038fd1498Szrj if (BITS_PER_UNIT != CHAR_BIT
734138fd1498Szrj || TREE_CODE (type) != ARRAY_TYPE
734238fd1498Szrj || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
734338fd1498Szrj || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
734438fd1498Szrj return 0;
734538fd1498Szrj
734638fd1498Szrj HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
734738fd1498Szrj if ((off == -1 && total_bytes > len) || off >= total_bytes)
734838fd1498Szrj return 0;
734938fd1498Szrj if (off == -1)
735038fd1498Szrj off = 0;
735138fd1498Szrj if (ptr == NULL)
735238fd1498Szrj /* Dry run. */;
735338fd1498Szrj else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
735438fd1498Szrj {
735538fd1498Szrj int written = 0;
735638fd1498Szrj if (off < TREE_STRING_LENGTH (expr))
735738fd1498Szrj {
735838fd1498Szrj written = MIN (len, TREE_STRING_LENGTH (expr) - off);
735938fd1498Szrj memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
736038fd1498Szrj }
736138fd1498Szrj memset (ptr + written, 0,
736238fd1498Szrj MIN (total_bytes - written, len - written));
736338fd1498Szrj }
736438fd1498Szrj else
736538fd1498Szrj memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
736638fd1498Szrj return MIN (total_bytes - off, len);
736738fd1498Szrj }
736838fd1498Szrj
736938fd1498Szrj
737038fd1498Szrj /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
737138fd1498Szrj REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
737238fd1498Szrj buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
737338fd1498Szrj anything, just do a dry run. If OFF is not -1 then start
737438fd1498Szrj the encoding at byte offset OFF and encode at most LEN bytes.
737538fd1498Szrj Return the number of bytes placed in the buffer, or zero upon failure. */
737638fd1498Szrj
737738fd1498Szrj int
native_encode_expr(const_tree expr,unsigned char * ptr,int len,int off)737838fd1498Szrj native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
737938fd1498Szrj {
738038fd1498Szrj /* We don't support starting at negative offset and -1 is special. */
738138fd1498Szrj if (off < -1)
738238fd1498Szrj return 0;
738338fd1498Szrj
738438fd1498Szrj switch (TREE_CODE (expr))
738538fd1498Szrj {
738638fd1498Szrj case INTEGER_CST:
738738fd1498Szrj return native_encode_int (expr, ptr, len, off);
738838fd1498Szrj
738938fd1498Szrj case REAL_CST:
739038fd1498Szrj return native_encode_real (expr, ptr, len, off);
739138fd1498Szrj
739238fd1498Szrj case FIXED_CST:
739338fd1498Szrj return native_encode_fixed (expr, ptr, len, off);
739438fd1498Szrj
739538fd1498Szrj case COMPLEX_CST:
739638fd1498Szrj return native_encode_complex (expr, ptr, len, off);
739738fd1498Szrj
739838fd1498Szrj case VECTOR_CST:
739938fd1498Szrj return native_encode_vector (expr, ptr, len, off);
740038fd1498Szrj
740138fd1498Szrj case STRING_CST:
740238fd1498Szrj return native_encode_string (expr, ptr, len, off);
740338fd1498Szrj
740438fd1498Szrj default:
740538fd1498Szrj return 0;
740638fd1498Szrj }
740738fd1498Szrj }
740838fd1498Szrj
740938fd1498Szrj
741038fd1498Szrj /* Subroutine of native_interpret_expr. Interpret the contents of
741138fd1498Szrj the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
741238fd1498Szrj If the buffer cannot be interpreted, return NULL_TREE. */
741338fd1498Szrj
741438fd1498Szrj static tree
native_interpret_int(tree type,const unsigned char * ptr,int len)741538fd1498Szrj native_interpret_int (tree type, const unsigned char *ptr, int len)
741638fd1498Szrj {
741738fd1498Szrj int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
741838fd1498Szrj
741938fd1498Szrj if (total_bytes > len
742038fd1498Szrj || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
742138fd1498Szrj return NULL_TREE;
742238fd1498Szrj
742338fd1498Szrj wide_int result = wi::from_buffer (ptr, total_bytes);
742438fd1498Szrj
742538fd1498Szrj return wide_int_to_tree (type, result);
742638fd1498Szrj }
742738fd1498Szrj
742838fd1498Szrj
742938fd1498Szrj /* Subroutine of native_interpret_expr. Interpret the contents of
743038fd1498Szrj the buffer PTR of length LEN as a FIXED_CST of type TYPE.
743138fd1498Szrj If the buffer cannot be interpreted, return NULL_TREE. */
743238fd1498Szrj
743338fd1498Szrj static tree
native_interpret_fixed(tree type,const unsigned char * ptr,int len)743438fd1498Szrj native_interpret_fixed (tree type, const unsigned char *ptr, int len)
743538fd1498Szrj {
743638fd1498Szrj scalar_mode mode = SCALAR_TYPE_MODE (type);
743738fd1498Szrj int total_bytes = GET_MODE_SIZE (mode);
743838fd1498Szrj double_int result;
743938fd1498Szrj FIXED_VALUE_TYPE fixed_value;
744038fd1498Szrj
744138fd1498Szrj if (total_bytes > len
744238fd1498Szrj || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
744338fd1498Szrj return NULL_TREE;
744438fd1498Szrj
744538fd1498Szrj result = double_int::from_buffer (ptr, total_bytes);
744638fd1498Szrj fixed_value = fixed_from_double_int (result, mode);
744738fd1498Szrj
744838fd1498Szrj return build_fixed (type, fixed_value);
744938fd1498Szrj }
745038fd1498Szrj
745138fd1498Szrj
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  /* 24 bytes == 192 bits, the capacity of TMP.  */
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  /* Number of target words contained in one 32-bit chunk of TMP.  */
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
	 bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  /* Sub-32-bit words: locate BYTE according to word order and
	     byte order within a word (inverse of native_encode_real).  */
	  int word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	{
	  offset = byte;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Reverse bytes within each long, or within the entire float
		 if it's smaller than a long (for HFmode).  */
	      offset = MIN (3, total_bytes - 1) - offset;
	      gcc_assert (offset >= 0);
	    }
	}
      /* Rebase OFFSET from chunk-relative to the whole buffer and pull
	 the byte into the right position of the 32-bit chunk.  */
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
750938fd1498Szrj
751038fd1498Szrj
751138fd1498Szrj /* Subroutine of native_interpret_expr. Interpret the contents of
751238fd1498Szrj the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
751338fd1498Szrj If the buffer cannot be interpreted, return NULL_TREE. */
751438fd1498Szrj
751538fd1498Szrj static tree
native_interpret_complex(tree type,const unsigned char * ptr,int len)751638fd1498Szrj native_interpret_complex (tree type, const unsigned char *ptr, int len)
751738fd1498Szrj {
751838fd1498Szrj tree etype, rpart, ipart;
751938fd1498Szrj int size;
752038fd1498Szrj
752138fd1498Szrj etype = TREE_TYPE (type);
752238fd1498Szrj size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
752338fd1498Szrj if (size * 2 > len)
752438fd1498Szrj return NULL_TREE;
752538fd1498Szrj rpart = native_interpret_expr (etype, ptr, size);
752638fd1498Szrj if (!rpart)
752738fd1498Szrj return NULL_TREE;
752838fd1498Szrj ipart = native_interpret_expr (etype, ptr+size, size);
752938fd1498Szrj if (!ipart)
753038fd1498Szrj return NULL_TREE;
753138fd1498Szrj return build_complex (type, rpart, ipart);
753238fd1498Szrj }
753338fd1498Szrj
753438fd1498Szrj
753538fd1498Szrj /* Subroutine of native_interpret_expr. Interpret the contents of
753638fd1498Szrj the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
753738fd1498Szrj If the buffer cannot be interpreted, return NULL_TREE. */
753838fd1498Szrj
753938fd1498Szrj static tree
native_interpret_vector(tree type,const unsigned char * ptr,unsigned int len)754038fd1498Szrj native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
754138fd1498Szrj {
754238fd1498Szrj tree etype, elem;
754338fd1498Szrj unsigned int i, size;
754438fd1498Szrj unsigned HOST_WIDE_INT count;
754538fd1498Szrj
754638fd1498Szrj etype = TREE_TYPE (type);
754738fd1498Szrj size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
754838fd1498Szrj if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
754938fd1498Szrj || size * count > len)
755038fd1498Szrj return NULL_TREE;
755138fd1498Szrj
755238fd1498Szrj tree_vector_builder elements (type, count, 1);
755338fd1498Szrj for (i = 0; i < count; ++i)
755438fd1498Szrj {
755538fd1498Szrj elem = native_interpret_expr (etype, ptr+(i*size), size);
755638fd1498Szrj if (!elem)
755738fd1498Szrj return NULL_TREE;
755838fd1498Szrj elements.quick_push (elem);
755938fd1498Szrj }
756038fd1498Szrj return elements.build ();
756138fd1498Szrj }
756238fd1498Szrj
756338fd1498Szrj
756438fd1498Szrj /* Subroutine of fold_view_convert_expr. Interpret the contents of
756538fd1498Szrj the buffer PTR of length LEN as a constant of type TYPE. For
756638fd1498Szrj INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
756738fd1498Szrj we return a REAL_CST, etc... If the buffer cannot be interpreted,
756838fd1498Szrj return NULL_TREE. */
756938fd1498Szrj
757038fd1498Szrj tree
native_interpret_expr(tree type,const unsigned char * ptr,int len)757138fd1498Szrj native_interpret_expr (tree type, const unsigned char *ptr, int len)
757238fd1498Szrj {
757338fd1498Szrj switch (TREE_CODE (type))
757438fd1498Szrj {
757538fd1498Szrj case INTEGER_TYPE:
757638fd1498Szrj case ENUMERAL_TYPE:
757738fd1498Szrj case BOOLEAN_TYPE:
757838fd1498Szrj case POINTER_TYPE:
757938fd1498Szrj case REFERENCE_TYPE:
758038fd1498Szrj return native_interpret_int (type, ptr, len);
758138fd1498Szrj
758238fd1498Szrj case REAL_TYPE:
758338fd1498Szrj return native_interpret_real (type, ptr, len);
758438fd1498Szrj
758538fd1498Szrj case FIXED_POINT_TYPE:
758638fd1498Szrj return native_interpret_fixed (type, ptr, len);
758738fd1498Szrj
758838fd1498Szrj case COMPLEX_TYPE:
758938fd1498Szrj return native_interpret_complex (type, ptr, len);
759038fd1498Szrj
759138fd1498Szrj case VECTOR_TYPE:
759238fd1498Szrj return native_interpret_vector (type, ptr, len);
759338fd1498Szrj
759438fd1498Szrj default:
759538fd1498Szrj return NULL_TREE;
759638fd1498Szrj }
759738fd1498Szrj }
759838fd1498Szrj
759938fd1498Szrj /* Returns true if we can interpret the contents of a native encoding
760038fd1498Szrj as TYPE. */
760138fd1498Szrj
760238fd1498Szrj static bool
can_native_interpret_type_p(tree type)760338fd1498Szrj can_native_interpret_type_p (tree type)
760438fd1498Szrj {
760538fd1498Szrj switch (TREE_CODE (type))
760638fd1498Szrj {
760738fd1498Szrj case INTEGER_TYPE:
760838fd1498Szrj case ENUMERAL_TYPE:
760938fd1498Szrj case BOOLEAN_TYPE:
761038fd1498Szrj case POINTER_TYPE:
761138fd1498Szrj case REFERENCE_TYPE:
761238fd1498Szrj case FIXED_POINT_TYPE:
761338fd1498Szrj case REAL_TYPE:
761438fd1498Szrj case COMPLEX_TYPE:
761538fd1498Szrj case VECTOR_TYPE:
761638fd1498Szrj return true;
761738fd1498Szrj default:
761838fd1498Szrj return false;
761938fd1498Szrj }
762038fd1498Szrj }
762138fd1498Szrj
762238fd1498Szrj
762338fd1498Szrj /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
762438fd1498Szrj TYPE at compile-time. If we're unable to perform the conversion
762538fd1498Szrj return NULL_TREE. */
762638fd1498Szrj
762738fd1498Szrj static tree
fold_view_convert_expr(tree type,tree expr)762838fd1498Szrj fold_view_convert_expr (tree type, tree expr)
762938fd1498Szrj {
763038fd1498Szrj /* We support up to 512-bit values (for V8DFmode). */
763138fd1498Szrj unsigned char buffer[64];
763238fd1498Szrj int len;
763338fd1498Szrj
763438fd1498Szrj /* Check that the host and target are sane. */
763538fd1498Szrj if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
763638fd1498Szrj return NULL_TREE;
763738fd1498Szrj
763838fd1498Szrj len = native_encode_expr (expr, buffer, sizeof (buffer));
763938fd1498Szrj if (len == 0)
764038fd1498Szrj return NULL_TREE;
764138fd1498Szrj
764238fd1498Szrj return native_interpret_expr (type, buffer, len);
764338fd1498Szrj }
764438fd1498Szrj
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      /* &*p folds to p, converted to the requested pointer type.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    /* &MEM[p, 0] is just p.  */
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    /* &MEM[cst, off]: fold into a constant pointer-plus expression.  */
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      /* The address of a view-convert is the address of its operand.  */
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    /* No simplification applies: build a plain ADDR_EXPR.  */
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
768238fd1498Szrj
768338fd1498Szrj /* Build an expression for the address of T. */
768438fd1498Szrj
768538fd1498Szrj tree
build_fold_addr_expr_loc(location_t loc,tree t)768638fd1498Szrj build_fold_addr_expr_loc (location_t loc, tree t)
768738fd1498Szrj {
768838fd1498Szrj tree ptrtype = build_pointer_type (TREE_TYPE (t));
768938fd1498Szrj
769038fd1498Szrj return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
769138fd1498Szrj }
769238fd1498Szrj
769338fd1498Szrj /* Fold a unary expression of code CODE and type TYPE with operand
769438fd1498Szrj OP0. Return the folded expression if folding is successful.
769538fd1498Szrj Otherwise, return NULL_TREE. */
769638fd1498Szrj
769738fd1498Szrj tree
fold_unary_loc(location_t loc,enum tree_code code,tree type,tree op0)769838fd1498Szrj fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
769938fd1498Szrj {
770038fd1498Szrj tree tem;
770138fd1498Szrj tree arg0;
770238fd1498Szrj enum tree_code_class kind = TREE_CODE_CLASS (code);
770338fd1498Szrj
770438fd1498Szrj gcc_assert (IS_EXPR_CODE_CLASS (kind)
770538fd1498Szrj && TREE_CODE_LENGTH (code) == 1);
770638fd1498Szrj
770738fd1498Szrj arg0 = op0;
770838fd1498Szrj if (arg0)
770938fd1498Szrj {
771038fd1498Szrj if (CONVERT_EXPR_CODE_P (code)
771138fd1498Szrj || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
771238fd1498Szrj {
771338fd1498Szrj /* Don't use STRIP_NOPS, because signedness of argument type
771438fd1498Szrj matters. */
771538fd1498Szrj STRIP_SIGN_NOPS (arg0);
771638fd1498Szrj }
771738fd1498Szrj else
771838fd1498Szrj {
771938fd1498Szrj /* Strip any conversions that don't change the mode. This
772038fd1498Szrj is safe for every expression, except for a comparison
772138fd1498Szrj expression because its signedness is derived from its
772238fd1498Szrj operands.
772338fd1498Szrj
772438fd1498Szrj Note that this is done as an internal manipulation within
772538fd1498Szrj the constant folder, in order to find the simplest
772638fd1498Szrj representation of the arguments so that their form can be
772738fd1498Szrj studied. In any cases, the appropriate type conversions
772838fd1498Szrj should be put back in the tree that will get out of the
772938fd1498Szrj constant folder. */
773038fd1498Szrj STRIP_NOPS (arg0);
773138fd1498Szrj }
773238fd1498Szrj
773338fd1498Szrj if (CONSTANT_CLASS_P (arg0))
773438fd1498Szrj {
773538fd1498Szrj tree tem = const_unop (code, type, arg0);
773638fd1498Szrj if (tem)
773738fd1498Szrj {
773838fd1498Szrj if (TREE_TYPE (tem) != type)
773938fd1498Szrj tem = fold_convert_loc (loc, type, tem);
774038fd1498Szrj return tem;
774138fd1498Szrj }
774238fd1498Szrj }
774338fd1498Szrj }
774438fd1498Szrj
774538fd1498Szrj tem = generic_simplify (loc, code, type, op0);
774638fd1498Szrj if (tem)
774738fd1498Szrj return tem;
774838fd1498Szrj
774938fd1498Szrj if (TREE_CODE_CLASS (code) == tcc_unary)
775038fd1498Szrj {
775138fd1498Szrj if (TREE_CODE (arg0) == COMPOUND_EXPR)
775238fd1498Szrj return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
775338fd1498Szrj fold_build1_loc (loc, code, type,
775438fd1498Szrj fold_convert_loc (loc, TREE_TYPE (op0),
775538fd1498Szrj TREE_OPERAND (arg0, 1))));
775638fd1498Szrj else if (TREE_CODE (arg0) == COND_EXPR)
775738fd1498Szrj {
775838fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
775938fd1498Szrj tree arg02 = TREE_OPERAND (arg0, 2);
776038fd1498Szrj if (! VOID_TYPE_P (TREE_TYPE (arg01)))
776138fd1498Szrj arg01 = fold_build1_loc (loc, code, type,
776238fd1498Szrj fold_convert_loc (loc,
776338fd1498Szrj TREE_TYPE (op0), arg01));
776438fd1498Szrj if (! VOID_TYPE_P (TREE_TYPE (arg02)))
776538fd1498Szrj arg02 = fold_build1_loc (loc, code, type,
776638fd1498Szrj fold_convert_loc (loc,
776738fd1498Szrj TREE_TYPE (op0), arg02));
776838fd1498Szrj tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
776938fd1498Szrj arg01, arg02);
777038fd1498Szrj
777138fd1498Szrj /* If this was a conversion, and all we did was to move into
777238fd1498Szrj inside the COND_EXPR, bring it back out. But leave it if
777338fd1498Szrj it is a conversion from integer to integer and the
777438fd1498Szrj result precision is no wider than a word since such a
777538fd1498Szrj conversion is cheap and may be optimized away by combine,
777638fd1498Szrj while it couldn't if it were outside the COND_EXPR. Then return
777738fd1498Szrj so we don't get into an infinite recursion loop taking the
777838fd1498Szrj conversion out and then back in. */
777938fd1498Szrj
778038fd1498Szrj if ((CONVERT_EXPR_CODE_P (code)
778138fd1498Szrj || code == NON_LVALUE_EXPR)
778238fd1498Szrj && TREE_CODE (tem) == COND_EXPR
778338fd1498Szrj && TREE_CODE (TREE_OPERAND (tem, 1)) == code
778438fd1498Szrj && TREE_CODE (TREE_OPERAND (tem, 2)) == code
778538fd1498Szrj && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
778638fd1498Szrj && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
778738fd1498Szrj && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
778838fd1498Szrj == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
778938fd1498Szrj && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
779038fd1498Szrj && (INTEGRAL_TYPE_P
779138fd1498Szrj (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
779238fd1498Szrj && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
779338fd1498Szrj || flag_syntax_only))
779438fd1498Szrj tem = build1_loc (loc, code, type,
779538fd1498Szrj build3 (COND_EXPR,
779638fd1498Szrj TREE_TYPE (TREE_OPERAND
779738fd1498Szrj (TREE_OPERAND (tem, 1), 0)),
779838fd1498Szrj TREE_OPERAND (tem, 0),
779938fd1498Szrj TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
780038fd1498Szrj TREE_OPERAND (TREE_OPERAND (tem, 2),
780138fd1498Szrj 0)));
780238fd1498Szrj return tem;
780338fd1498Szrj }
780438fd1498Szrj }
780538fd1498Szrj
780638fd1498Szrj switch (code)
780738fd1498Szrj {
780838fd1498Szrj case NON_LVALUE_EXPR:
780938fd1498Szrj if (!maybe_lvalue_p (op0))
781038fd1498Szrj return fold_convert_loc (loc, type, op0);
781138fd1498Szrj return NULL_TREE;
781238fd1498Szrj
781338fd1498Szrj CASE_CONVERT:
781438fd1498Szrj case FLOAT_EXPR:
781538fd1498Szrj case FIX_TRUNC_EXPR:
781638fd1498Szrj if (COMPARISON_CLASS_P (op0))
781738fd1498Szrj {
781838fd1498Szrj /* If we have (type) (a CMP b) and type is an integral type, return
781938fd1498Szrj new expression involving the new type. Canonicalize
782038fd1498Szrj (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
782138fd1498Szrj non-integral type.
782238fd1498Szrj Do not fold the result as that would not simplify further, also
782338fd1498Szrj folding again results in recursions. */
782438fd1498Szrj if (TREE_CODE (type) == BOOLEAN_TYPE)
782538fd1498Szrj return build2_loc (loc, TREE_CODE (op0), type,
782638fd1498Szrj TREE_OPERAND (op0, 0),
782738fd1498Szrj TREE_OPERAND (op0, 1));
782838fd1498Szrj else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
782938fd1498Szrj && TREE_CODE (type) != VECTOR_TYPE)
783038fd1498Szrj return build3_loc (loc, COND_EXPR, type, op0,
783138fd1498Szrj constant_boolean_node (true, type),
783238fd1498Szrj constant_boolean_node (false, type));
783338fd1498Szrj }
783438fd1498Szrj
783538fd1498Szrj /* Handle (T *)&A.B.C for A being of type T and B and C
783638fd1498Szrj living at offset zero. This occurs frequently in
783738fd1498Szrj C++ upcasting and then accessing the base. */
783838fd1498Szrj if (TREE_CODE (op0) == ADDR_EXPR
783938fd1498Szrj && POINTER_TYPE_P (type)
784038fd1498Szrj && handled_component_p (TREE_OPERAND (op0, 0)))
784138fd1498Szrj {
784238fd1498Szrj poly_int64 bitsize, bitpos;
784338fd1498Szrj tree offset;
784438fd1498Szrj machine_mode mode;
784538fd1498Szrj int unsignedp, reversep, volatilep;
784638fd1498Szrj tree base
784738fd1498Szrj = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
784838fd1498Szrj &offset, &mode, &unsignedp, &reversep,
784938fd1498Szrj &volatilep);
785038fd1498Szrj /* If the reference was to a (constant) zero offset, we can use
785138fd1498Szrj the address of the base if it has the same base type
785238fd1498Szrj as the result type and the pointer type is unqualified. */
785338fd1498Szrj if (!offset
785438fd1498Szrj && known_eq (bitpos, 0)
785538fd1498Szrj && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
785638fd1498Szrj == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
785738fd1498Szrj && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
785838fd1498Szrj return fold_convert_loc (loc, type,
785938fd1498Szrj build_fold_addr_expr_loc (loc, base));
786038fd1498Szrj }
786138fd1498Szrj
786238fd1498Szrj if (TREE_CODE (op0) == MODIFY_EXPR
786338fd1498Szrj && TREE_CONSTANT (TREE_OPERAND (op0, 1))
786438fd1498Szrj /* Detect assigning a bitfield. */
786538fd1498Szrj && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
786638fd1498Szrj && DECL_BIT_FIELD
786738fd1498Szrj (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
786838fd1498Szrj {
786938fd1498Szrj /* Don't leave an assignment inside a conversion
787038fd1498Szrj unless assigning a bitfield. */
787138fd1498Szrj tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
787238fd1498Szrj /* First do the assignment, then return converted constant. */
787338fd1498Szrj tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
787438fd1498Szrj TREE_NO_WARNING (tem) = 1;
787538fd1498Szrj TREE_USED (tem) = 1;
787638fd1498Szrj return tem;
787738fd1498Szrj }
787838fd1498Szrj
787938fd1498Szrj /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
788038fd1498Szrj constants (if x has signed type, the sign bit cannot be set
788138fd1498Szrj in c). This folds extension into the BIT_AND_EXPR.
788238fd1498Szrj ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
788338fd1498Szrj very likely don't have maximal range for their precision and this
788438fd1498Szrj transformation effectively doesn't preserve non-maximal ranges. */
788538fd1498Szrj if (TREE_CODE (type) == INTEGER_TYPE
788638fd1498Szrj && TREE_CODE (op0) == BIT_AND_EXPR
788738fd1498Szrj && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
788838fd1498Szrj {
788938fd1498Szrj tree and_expr = op0;
789038fd1498Szrj tree and0 = TREE_OPERAND (and_expr, 0);
789138fd1498Szrj tree and1 = TREE_OPERAND (and_expr, 1);
789238fd1498Szrj int change = 0;
789338fd1498Szrj
789438fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
789538fd1498Szrj || (TYPE_PRECISION (type)
789638fd1498Szrj <= TYPE_PRECISION (TREE_TYPE (and_expr))))
789738fd1498Szrj change = 1;
789838fd1498Szrj else if (TYPE_PRECISION (TREE_TYPE (and1))
789938fd1498Szrj <= HOST_BITS_PER_WIDE_INT
790038fd1498Szrj && tree_fits_uhwi_p (and1))
790138fd1498Szrj {
790238fd1498Szrj unsigned HOST_WIDE_INT cst;
790338fd1498Szrj
790438fd1498Szrj cst = tree_to_uhwi (and1);
790538fd1498Szrj cst &= HOST_WIDE_INT_M1U
790638fd1498Szrj << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
790738fd1498Szrj change = (cst == 0);
790838fd1498Szrj if (change
790938fd1498Szrj && !flag_syntax_only
791038fd1498Szrj && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
791138fd1498Szrj == ZERO_EXTEND))
791238fd1498Szrj {
791338fd1498Szrj tree uns = unsigned_type_for (TREE_TYPE (and0));
791438fd1498Szrj and0 = fold_convert_loc (loc, uns, and0);
791538fd1498Szrj and1 = fold_convert_loc (loc, uns, and1);
791638fd1498Szrj }
791738fd1498Szrj }
791838fd1498Szrj if (change)
791938fd1498Szrj {
792038fd1498Szrj tem = force_fit_type (type, wi::to_widest (and1), 0,
792138fd1498Szrj TREE_OVERFLOW (and1));
792238fd1498Szrj return fold_build2_loc (loc, BIT_AND_EXPR, type,
792338fd1498Szrj fold_convert_loc (loc, type, and0), tem);
792438fd1498Szrj }
792538fd1498Szrj }
792638fd1498Szrj
792738fd1498Szrj /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
792838fd1498Szrj cast (T1)X will fold away. We assume that this happens when X itself
792938fd1498Szrj is a cast. */
793038fd1498Szrj if (POINTER_TYPE_P (type)
793138fd1498Szrj && TREE_CODE (arg0) == POINTER_PLUS_EXPR
793238fd1498Szrj && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
793338fd1498Szrj {
793438fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
793538fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
793638fd1498Szrj
793738fd1498Szrj return fold_build_pointer_plus_loc
793838fd1498Szrj (loc, fold_convert_loc (loc, type, arg00), arg01);
793938fd1498Szrj }
794038fd1498Szrj
794138fd1498Szrj /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
794238fd1498Szrj of the same precision, and X is an integer type not narrower than
794338fd1498Szrj types T1 or T2, i.e. the cast (T2)X isn't an extension. */
794438fd1498Szrj if (INTEGRAL_TYPE_P (type)
794538fd1498Szrj && TREE_CODE (op0) == BIT_NOT_EXPR
794638fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (op0))
794738fd1498Szrj && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
794838fd1498Szrj && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
794938fd1498Szrj {
795038fd1498Szrj tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
795138fd1498Szrj if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
795238fd1498Szrj && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
795338fd1498Szrj return fold_build1_loc (loc, BIT_NOT_EXPR, type,
795438fd1498Szrj fold_convert_loc (loc, type, tem));
795538fd1498Szrj }
795638fd1498Szrj
795738fd1498Szrj /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
795838fd1498Szrj type of X and Y (integer types only). */
795938fd1498Szrj if (INTEGRAL_TYPE_P (type)
796038fd1498Szrj && TREE_CODE (op0) == MULT_EXPR
796138fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (op0))
796238fd1498Szrj && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
796338fd1498Szrj {
796438fd1498Szrj /* Be careful not to introduce new overflows. */
796538fd1498Szrj tree mult_type;
796638fd1498Szrj if (TYPE_OVERFLOW_WRAPS (type))
796738fd1498Szrj mult_type = type;
796838fd1498Szrj else
796938fd1498Szrj mult_type = unsigned_type_for (type);
797038fd1498Szrj
797138fd1498Szrj if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
797238fd1498Szrj {
797338fd1498Szrj tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
797438fd1498Szrj fold_convert_loc (loc, mult_type,
797538fd1498Szrj TREE_OPERAND (op0, 0)),
797638fd1498Szrj fold_convert_loc (loc, mult_type,
797738fd1498Szrj TREE_OPERAND (op0, 1)));
797838fd1498Szrj return fold_convert_loc (loc, type, tem);
797938fd1498Szrj }
798038fd1498Szrj }
798138fd1498Szrj
798238fd1498Szrj return NULL_TREE;
798338fd1498Szrj
798438fd1498Szrj case VIEW_CONVERT_EXPR:
798538fd1498Szrj if (TREE_CODE (op0) == MEM_REF)
798638fd1498Szrj {
798738fd1498Szrj if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
798838fd1498Szrj type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
798938fd1498Szrj tem = fold_build2_loc (loc, MEM_REF, type,
799038fd1498Szrj TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
799138fd1498Szrj REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
799238fd1498Szrj return tem;
799338fd1498Szrj }
799438fd1498Szrj
799538fd1498Szrj return NULL_TREE;
799638fd1498Szrj
799738fd1498Szrj case NEGATE_EXPR:
799838fd1498Szrj tem = fold_negate_expr (loc, arg0);
799938fd1498Szrj if (tem)
800038fd1498Szrj return fold_convert_loc (loc, type, tem);
800138fd1498Szrj return NULL_TREE;
800238fd1498Szrj
800338fd1498Szrj case ABS_EXPR:
800438fd1498Szrj /* Convert fabs((double)float) into (double)fabsf(float). */
800538fd1498Szrj if (TREE_CODE (arg0) == NOP_EXPR
800638fd1498Szrj && TREE_CODE (type) == REAL_TYPE)
800738fd1498Szrj {
800838fd1498Szrj tree targ0 = strip_float_extensions (arg0);
800938fd1498Szrj if (targ0 != arg0)
801038fd1498Szrj return fold_convert_loc (loc, type,
801138fd1498Szrj fold_build1_loc (loc, ABS_EXPR,
801238fd1498Szrj TREE_TYPE (targ0),
801338fd1498Szrj targ0));
801438fd1498Szrj }
801538fd1498Szrj return NULL_TREE;
801638fd1498Szrj
801738fd1498Szrj case BIT_NOT_EXPR:
801838fd1498Szrj /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
801938fd1498Szrj if (TREE_CODE (arg0) == BIT_XOR_EXPR
802038fd1498Szrj && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
802138fd1498Szrj fold_convert_loc (loc, type,
802238fd1498Szrj TREE_OPERAND (arg0, 0)))))
802338fd1498Szrj return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
802438fd1498Szrj fold_convert_loc (loc, type,
802538fd1498Szrj TREE_OPERAND (arg0, 1)));
802638fd1498Szrj else if (TREE_CODE (arg0) == BIT_XOR_EXPR
802738fd1498Szrj && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
802838fd1498Szrj fold_convert_loc (loc, type,
802938fd1498Szrj TREE_OPERAND (arg0, 1)))))
803038fd1498Szrj return fold_build2_loc (loc, BIT_XOR_EXPR, type,
803138fd1498Szrj fold_convert_loc (loc, type,
803238fd1498Szrj TREE_OPERAND (arg0, 0)), tem);
803338fd1498Szrj
803438fd1498Szrj return NULL_TREE;
803538fd1498Szrj
803638fd1498Szrj case TRUTH_NOT_EXPR:
803738fd1498Szrj /* Note that the operand of this must be an int
803838fd1498Szrj and its values must be 0 or 1.
803938fd1498Szrj ("true" is a fixed value perhaps depending on the language,
804038fd1498Szrj but we don't handle values other than 1 correctly yet.) */
804138fd1498Szrj tem = fold_truth_not_expr (loc, arg0);
804238fd1498Szrj if (!tem)
804338fd1498Szrj return NULL_TREE;
804438fd1498Szrj return fold_convert_loc (loc, type, tem);
804538fd1498Szrj
804638fd1498Szrj case INDIRECT_REF:
804738fd1498Szrj /* Fold *&X to X if X is an lvalue. */
804838fd1498Szrj if (TREE_CODE (op0) == ADDR_EXPR)
804938fd1498Szrj {
805038fd1498Szrj tree op00 = TREE_OPERAND (op0, 0);
805138fd1498Szrj if ((VAR_P (op00)
805238fd1498Szrj || TREE_CODE (op00) == PARM_DECL
805338fd1498Szrj || TREE_CODE (op00) == RESULT_DECL)
805438fd1498Szrj && !TREE_READONLY (op00))
805538fd1498Szrj return op00;
805638fd1498Szrj }
805738fd1498Szrj return NULL_TREE;
805838fd1498Szrj
805938fd1498Szrj default:
806038fd1498Szrj return NULL_TREE;
806138fd1498Szrj } /* switch (code) */
806238fd1498Szrj }
806338fd1498Szrj
806438fd1498Szrj
806538fd1498Szrj /* If the operation was a conversion do _not_ mark a resulting constant
806638fd1498Szrj with TREE_OVERFLOW if the original constant was not. These conversions
806738fd1498Szrj have implementation defined behavior and retaining the TREE_OVERFLOW
806838fd1498Szrj flag here would confuse later passes such as VRP. */
806938fd1498Szrj tree
fold_unary_ignore_overflow_loc(location_t loc,enum tree_code code,tree type,tree op0)807038fd1498Szrj fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
807138fd1498Szrj tree type, tree op0)
807238fd1498Szrj {
807338fd1498Szrj tree res = fold_unary_loc (loc, code, type, op0);
807438fd1498Szrj if (res
807538fd1498Szrj && TREE_CODE (res) == INTEGER_CST
807638fd1498Szrj && TREE_CODE (op0) == INTEGER_CST
807738fd1498Szrj && CONVERT_EXPR_CODE_P (code))
807838fd1498Szrj TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
807938fd1498Szrj
808038fd1498Szrj return res;
808138fd1498Szrj }
808238fd1498Szrj
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      /* Operand swapping below is only valid when the inner operator is
	 one of the fully commutative (non-short-circuit) codes and the
	 outer code is non-short-circuit as well.  */
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      /* (A op B) code (A op C) -> A op (B code C), and the variants
	 that commutativity permits.  */
      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
    return tem;

  /* Try merging the left-hand short-circuit operand of the opposite
     kind into the right-hand operand.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  /* Likewise with the roles of the operands reversed.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && (tem = fold_truth_andor_1 (loc, code, type,
				    TREE_OPERAND (arg0, 1), arg1)) != 0)
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  /* The target's LOGICAL_OP_NON_SHORT_CIRCUIT default can be overridden
     at run time via --param logical-op-non-short-circuit (-1 means
     "use the target default").  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
    logical_op_non_short_circuit
      = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
  if (logical_op_non_short_circuit
      && !flag_sanitize_coverage
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      /* NCODE is the non-short-circuit form of CODE, ICODE the
	 corresponding short-circuit (IF) form.  */
      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
823238fd1498Szrj
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  Each transform below pairs
     a strict comparison with its non-strict counterpart (or vice
     versa) so the constant's magnitude can shrink by one.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  /* All of the transforms above are only valid when A +- CST does not
     overflow; tell the caller so it can warn.  */
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  /* Reduce the magnitude: add 1 to a negative constant, subtract 1
     from a positive one, then rebuild A +- CST' and the comparison.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
831138fd1498Szrj
831238fd1498Szrj /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
831338fd1498Szrj overflow further. Try to decrease the magnitude of constants involved
831438fd1498Szrj by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
831538fd1498Szrj and put sole constants at the second argument position.
831638fd1498Szrj Returns the canonicalized tree if changed, otherwise NULL_TREE. */
831738fd1498Szrj
831838fd1498Szrj static tree
maybe_canonicalize_comparison(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)831938fd1498Szrj maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
832038fd1498Szrj tree arg0, tree arg1)
832138fd1498Szrj {
832238fd1498Szrj tree t;
832338fd1498Szrj bool strict_overflow_p;
832438fd1498Szrj const char * const warnmsg = G_("assuming signed overflow does not occur "
832538fd1498Szrj "when reducing constant in comparison");
832638fd1498Szrj
832738fd1498Szrj /* Try canonicalization by simplifying arg0. */
832838fd1498Szrj strict_overflow_p = false;
832938fd1498Szrj t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
833038fd1498Szrj &strict_overflow_p);
833138fd1498Szrj if (t)
833238fd1498Szrj {
833338fd1498Szrj if (strict_overflow_p)
833438fd1498Szrj fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
833538fd1498Szrj return t;
833638fd1498Szrj }
833738fd1498Szrj
833838fd1498Szrj /* Try canonicalization by simplifying arg1 using the swapped
833938fd1498Szrj comparison. */
834038fd1498Szrj code = swap_tree_comparison (code);
834138fd1498Szrj strict_overflow_p = false;
834238fd1498Szrj t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
834338fd1498Szrj &strict_overflow_p);
834438fd1498Szrj if (t && strict_overflow_p)
834538fd1498Szrj fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
834638fd1498Szrj return t;
834738fd1498Szrj }
834838fd1498Szrj
834938fd1498Szrj /* Return whether BASE + OFFSET + BITPOS may wrap around the address
835038fd1498Szrj space. This is used to avoid issuing overflow warnings for
835138fd1498Szrj expressions like &p->x which can not wrap. */
835238fd1498Szrj
835338fd1498Szrj static bool
pointer_may_wrap_p(tree base,tree offset,poly_int64 bitpos)835438fd1498Szrj pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
835538fd1498Szrj {
835638fd1498Szrj if (!POINTER_TYPE_P (TREE_TYPE (base)))
835738fd1498Szrj return true;
835838fd1498Szrj
835938fd1498Szrj if (maybe_lt (bitpos, 0))
836038fd1498Szrj return true;
836138fd1498Szrj
836238fd1498Szrj poly_wide_int wi_offset;
836338fd1498Szrj int precision = TYPE_PRECISION (TREE_TYPE (base));
836438fd1498Szrj if (offset == NULL_TREE)
836538fd1498Szrj wi_offset = wi::zero (precision);
836638fd1498Szrj else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
836738fd1498Szrj return true;
836838fd1498Szrj else
836938fd1498Szrj wi_offset = wi::to_poly_wide (offset);
837038fd1498Szrj
837138fd1498Szrj bool overflow;
837238fd1498Szrj poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
837338fd1498Szrj precision);
837438fd1498Szrj poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
837538fd1498Szrj if (overflow)
837638fd1498Szrj return true;
837738fd1498Szrj
837838fd1498Szrj poly_uint64 total_hwi, size;
837938fd1498Szrj if (!total.to_uhwi (&total_hwi)
838038fd1498Szrj || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
838138fd1498Szrj &size)
838238fd1498Szrj || known_eq (size, 0U))
838338fd1498Szrj return true;
838438fd1498Szrj
838538fd1498Szrj if (known_le (total_hwi, size))
838638fd1498Szrj return false;
838738fd1498Szrj
838838fd1498Szrj /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
838938fd1498Szrj array. */
839038fd1498Szrj if (TREE_CODE (base) == ADDR_EXPR
839138fd1498Szrj && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
839238fd1498Szrj &size)
839338fd1498Szrj && maybe_ne (size, 0U)
839438fd1498Szrj && known_le (total_hwi, size))
839538fd1498Szrj return false;
839638fd1498Szrj
839738fd1498Szrj return true;
839838fd1498Szrj }
839938fd1498Szrj
840038fd1498Szrj /* Return a positive integer when the symbol DECL is known to have
840138fd1498Szrj a nonzero address, zero when it's known not to (e.g., it's a weak
840238fd1498Szrj symbol), and a negative integer when the symbol is not yet in the
840338fd1498Szrj symbol table and so whether or not its address is zero is unknown.
840438fd1498Szrj For function local objects always return positive integer. */
840538fd1498Szrj static int
maybe_nonzero_address(tree decl)840638fd1498Szrj maybe_nonzero_address (tree decl)
840738fd1498Szrj {
840838fd1498Szrj if (DECL_P (decl) && decl_in_symtab_p (decl))
840938fd1498Szrj if (struct symtab_node *symbol = symtab_node::get_create (decl))
841038fd1498Szrj return symbol->nonzero_address ();
841138fd1498Szrj
841238fd1498Szrj /* Function local objects are never NULL. */
841338fd1498Szrj if (DECL_P (decl)
841438fd1498Szrj && (DECL_CONTEXT (decl)
841538fd1498Szrj && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
841638fd1498Szrj && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
841738fd1498Szrj return 1;
841838fd1498Szrj
841938fd1498Szrj return -1;
842038fd1498Szrj }
842138fd1498Szrj
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  /* Strip conversions that neither change the value nor the signedness,
     so the structural checks below see through casts.  */
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      /* bitposN is the constant part of the offset in bits; offsetN
	 holds any remaining variable part as a tree.  */
      poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0
	    = get_inner_reference (TREE_OPERAND (arg0, 0),
				   &bitsize, &bitpos0, &offset0, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0
		= get_inner_reference (TREE_OPERAND (base0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base0) == INDIRECT_REF)
		base0 = TREE_OPERAND (base0, 0);
	      else
		indirect_base0 = true;
	    }
	  /* Accumulate the POINTER_PLUS_EXPR offset on top of whatever
	     get_inner_reference found.  */
	  if (offset0 == NULL_TREE || integer_zerop (offset0))
	    offset0 = TREE_OPERAND (arg0, 1);
	  else
	    offset0 = size_binop (PLUS_EXPR, offset0,
				  TREE_OPERAND (arg0, 1));
	  if (poly_int_tree_p (offset0))
	    {
	      /* A constant offset can be folded entirely into bitpos0
		 (converted from bytes to bits); sign-extend from
	         sizetype precision first since pointer offsets are
	         effectively signed.  */
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos0;
	      if (tem.to_shwi (&bitpos0))
		/* The whole offset now lives in bitpos0.  */
		offset0 = NULL_TREE;
	    }
	}

      /* Same decomposition for the second operand.  */
      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1
	    = get_inner_reference (TREE_OPERAND (arg1, 0),
				   &bitsize, &bitpos1, &offset1, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1
		= get_inner_reference (TREE_OPERAND (base1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base1) == INDIRECT_REF)
		base1 = TREE_OPERAND (base1, 0);
	      else
		indirect_base1 = true;
	    }
	  if (offset1 == NULL_TREE || integer_zerop (offset1))
	    offset1 = TREE_OPERAND (arg1, 1);
	  else
	    offset1 = size_binop (PLUS_EXPR, offset1,
				  TREE_OPERAND (arg1, 1));
	  if (poly_int_tree_p (offset1))
	    {
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos1;
	      if (tem.to_shwi (&bitpos1))
		offset1 = NULL_TREE;
	    }
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1,
			      indirect_base0 ? OEP_ADDRESS_OF : 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0
		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      /* Ordering comparisons of P +- C are only valid if the
		 pointer arithmetic cannot wrap; warn when we rely on
		 that assumption.  */
	      if (!equality_code
		  && maybe_ne (bitpos0, bitpos1)
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      /* Decide the comparison purely on the constant bit
		 positions; known_*/maybe_* leave polynomial offsets
		 undecided, in which case we fall through.  */
	      switch (code)
		{
		case EQ_EXPR:
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case NE_EXPR:
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LT_EXPR:
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LE_EXPR:
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GE_EXPR:
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GT_EXPR:
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (known_eq (bitpos0, bitpos1)
		   && (equality_code
		       || (indirect_base0
			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (known_eq (bitpos0, bitpos1)
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  /* Rebuild addresses for object bases so both sides have
	     pointer type again.  */
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
      /* Comparison between an ordinary (non-weak) symbol and a null
	 pointer can be eliminated since such symbols must have a non
	 null address.  In C, relational expressions between pointers
	 to objects and null pointers are undefined.  The results
	 below follow the C++ rules with the additional property that
	 every object pointer compares greater than a null pointer.
      */
      else if (((DECL_P (base0)
		 && maybe_nonzero_address (base0) > 0
		 /* Avoid folding references to struct members at offset 0 to
		    prevent tests like '&ptr->firstmember == 0' from getting
		    eliminated.  When ptr is null, although the -> expression
		    is strictly speaking invalid, GCC retains it as a matter
		    of QoI.  See PR c/44555.  */
		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
		|| CONSTANT_CLASS_P (base0))
	       && indirect_base0
	       /* The caller guarantees that when one of the arguments is
		  constant (i.e., null in this case) it is second.  */
	       && integer_zerop (arg1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (false, type);
	    case GE_EXPR:
	    case GT_EXPR:
	    case NE_EXPR:
	      return constant_boolean_node (true, type);
	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.
	 First try moving C1 to the right-hand side.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      /* Otherwise try moving C2 to the left-hand side.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  /* Evaluate the comparison for cval1 > cval2, cval1 == cval2
	     and cval1 < cval2 respectively.  */
	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  return NULL_TREE;
}
886238fd1498Szrj
886338fd1498Szrj
886438fd1498Szrj /* Subroutine of fold_binary. Optimize complex multiplications of the
886538fd1498Szrj form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
886638fd1498Szrj argument EXPR represents the expression "z" of type TYPE. */
886738fd1498Szrj
886838fd1498Szrj static tree
fold_mult_zconjz(location_t loc,tree type,tree expr)886938fd1498Szrj fold_mult_zconjz (location_t loc, tree type, tree expr)
887038fd1498Szrj {
887138fd1498Szrj tree itype = TREE_TYPE (type);
887238fd1498Szrj tree rpart, ipart, tem;
887338fd1498Szrj
887438fd1498Szrj if (TREE_CODE (expr) == COMPLEX_EXPR)
887538fd1498Szrj {
887638fd1498Szrj rpart = TREE_OPERAND (expr, 0);
887738fd1498Szrj ipart = TREE_OPERAND (expr, 1);
887838fd1498Szrj }
887938fd1498Szrj else if (TREE_CODE (expr) == COMPLEX_CST)
888038fd1498Szrj {
888138fd1498Szrj rpart = TREE_REALPART (expr);
888238fd1498Szrj ipart = TREE_IMAGPART (expr);
888338fd1498Szrj }
888438fd1498Szrj else
888538fd1498Szrj {
888638fd1498Szrj expr = save_expr (expr);
888738fd1498Szrj rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
888838fd1498Szrj ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
888938fd1498Szrj }
889038fd1498Szrj
889138fd1498Szrj rpart = save_expr (rpart);
889238fd1498Szrj ipart = save_expr (ipart);
889338fd1498Szrj tem = fold_build2_loc (loc, PLUS_EXPR, itype,
889438fd1498Szrj fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
889538fd1498Szrj fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
889638fd1498Szrj return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
889738fd1498Szrj build_zero_cst (itype));
889838fd1498Szrj }
889938fd1498Szrj
890038fd1498Szrj
890138fd1498Szrj /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
890238fd1498Szrj CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
890338fd1498Szrj true if successful. */
890438fd1498Szrj
890538fd1498Szrj static bool
vec_cst_ctor_to_array(tree arg,unsigned int nelts,tree * elts)890638fd1498Szrj vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
890738fd1498Szrj {
890838fd1498Szrj unsigned HOST_WIDE_INT i, nunits;
890938fd1498Szrj
891038fd1498Szrj if (TREE_CODE (arg) == VECTOR_CST
891138fd1498Szrj && VECTOR_CST_NELTS (arg).is_constant (&nunits))
891238fd1498Szrj {
891338fd1498Szrj for (i = 0; i < nunits; ++i)
891438fd1498Szrj elts[i] = VECTOR_CST_ELT (arg, i);
891538fd1498Szrj }
891638fd1498Szrj else if (TREE_CODE (arg) == CONSTRUCTOR)
891738fd1498Szrj {
891838fd1498Szrj constructor_elt *elt;
891938fd1498Szrj
892038fd1498Szrj FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
892138fd1498Szrj if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
892238fd1498Szrj return false;
892338fd1498Szrj else
892438fd1498Szrj elts[i] = elt->value;
892538fd1498Szrj }
892638fd1498Szrj else
892738fd1498Szrj return false;
892838fd1498Szrj for (; i < nelts; i++)
892938fd1498Szrj elts[i]
893038fd1498Szrj = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
893138fd1498Szrj return true;
893238fd1498Szrj }
893338fd1498Szrj
893438fd1498Szrj /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
893538fd1498Szrj selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
893638fd1498Szrj NULL_TREE otherwise. */
893738fd1498Szrj
893838fd1498Szrj static tree
fold_vec_perm(tree type,tree arg0,tree arg1,const vec_perm_indices & sel)893938fd1498Szrj fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
894038fd1498Szrj {
894138fd1498Szrj unsigned int i;
894238fd1498Szrj unsigned HOST_WIDE_INT nelts;
894338fd1498Szrj bool need_ctor = false;
894438fd1498Szrj
894538fd1498Szrj if (!sel.length ().is_constant (&nelts))
894638fd1498Szrj return NULL_TREE;
894738fd1498Szrj gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
894838fd1498Szrj && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
894938fd1498Szrj && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
895038fd1498Szrj if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
895138fd1498Szrj || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
895238fd1498Szrj return NULL_TREE;
895338fd1498Szrj
895438fd1498Szrj tree *in_elts = XALLOCAVEC (tree, nelts * 2);
895538fd1498Szrj if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
895638fd1498Szrj || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
895738fd1498Szrj return NULL_TREE;
895838fd1498Szrj
895938fd1498Szrj tree_vector_builder out_elts (type, nelts, 1);
896038fd1498Szrj for (i = 0; i < nelts; i++)
896138fd1498Szrj {
896238fd1498Szrj HOST_WIDE_INT index;
896338fd1498Szrj if (!sel[i].is_constant (&index))
896438fd1498Szrj return NULL_TREE;
896538fd1498Szrj if (!CONSTANT_CLASS_P (in_elts[index]))
896638fd1498Szrj need_ctor = true;
896738fd1498Szrj out_elts.quick_push (unshare_expr (in_elts[index]));
896838fd1498Szrj }
896938fd1498Szrj
897038fd1498Szrj if (need_ctor)
897138fd1498Szrj {
897238fd1498Szrj vec<constructor_elt, va_gc> *v;
897338fd1498Szrj vec_alloc (v, nelts);
897438fd1498Szrj for (i = 0; i < nelts; i++)
897538fd1498Szrj CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
897638fd1498Szrj return build_constructor (type, v);
897738fd1498Szrj }
897838fd1498Szrj else
897938fd1498Szrj return out_elts.build ();
898038fd1498Szrj }
898138fd1498Szrj
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1,
				   bool use_pointer_diff)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  /* Difference contributed by the bases; stays zero when the bases
     are structurally equal.  */
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.
     Note the embedded assignments: each disjunct both tests and
     records the base difference, and a failed (NULL) result falls
     through to the later alternatives.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
						use_pointer_diff)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	        = use_pointer_diff
		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
				     TREE_OPERAND (base0, 0),
				     TREE_OPERAND (base1, 0))
		  : fold_binary_loc (loc, MINUS_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (base0, 0)),
				     fold_convert (type,
						   TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      /* &base[i0] - &base[i1] = base_offset + (i0 - i1) * elt_size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
902838fd1498Szrj
902938fd1498Szrj /* If the real or vector real constant CST of type TYPE has an exact
903038fd1498Szrj inverse, return it, else return NULL. */
903138fd1498Szrj
903238fd1498Szrj tree
exact_inverse(tree type,tree cst)903338fd1498Szrj exact_inverse (tree type, tree cst)
903438fd1498Szrj {
903538fd1498Szrj REAL_VALUE_TYPE r;
903638fd1498Szrj tree unit_type;
903738fd1498Szrj machine_mode mode;
903838fd1498Szrj
903938fd1498Szrj switch (TREE_CODE (cst))
904038fd1498Szrj {
904138fd1498Szrj case REAL_CST:
904238fd1498Szrj r = TREE_REAL_CST (cst);
904338fd1498Szrj
904438fd1498Szrj if (exact_real_inverse (TYPE_MODE (type), &r))
904538fd1498Szrj return build_real (type, r);
904638fd1498Szrj
904738fd1498Szrj return NULL_TREE;
904838fd1498Szrj
904938fd1498Szrj case VECTOR_CST:
905038fd1498Szrj {
905138fd1498Szrj unit_type = TREE_TYPE (type);
905238fd1498Szrj mode = TYPE_MODE (unit_type);
905338fd1498Szrj
905438fd1498Szrj tree_vector_builder elts;
905538fd1498Szrj if (!elts.new_unary_operation (type, cst, false))
905638fd1498Szrj return NULL_TREE;
905738fd1498Szrj unsigned int count = elts.encoded_nelts ();
905838fd1498Szrj for (unsigned int i = 0; i < count; ++i)
905938fd1498Szrj {
906038fd1498Szrj r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
906138fd1498Szrj if (!exact_real_inverse (mode, &r))
906238fd1498Szrj return NULL_TREE;
906338fd1498Szrj elts.quick_push (build_real (unit_type, r));
906438fd1498Szrj }
906538fd1498Szrj
906638fd1498Szrj return elts.build ();
906738fd1498Szrj }
906838fd1498Szrj
906938fd1498Szrj default:
907038fd1498Szrj return NULL_TREE;
907138fd1498Szrj }
907238fd1498Szrj }
907338fd1498Szrj
907438fd1498Szrj /* Mask out the tz least significant bits of X of type TYPE where
907538fd1498Szrj tz is the number of trailing zeroes in Y. */
907638fd1498Szrj static wide_int
mask_with_tz(tree type,const wide_int & x,const wide_int & y)907738fd1498Szrj mask_with_tz (tree type, const wide_int &x, const wide_int &y)
907838fd1498Szrj {
907938fd1498Szrj int tz = wi::ctz (y);
908038fd1498Szrj if (tz > 0)
908138fd1498Szrj return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
908238fd1498Szrj return x;
908338fd1498Szrj }
908438fd1498Szrj
908538fd1498Szrj /* Return true when T is an address and is known to be nonzero.
908638fd1498Szrj For floating point we further ensure that T is not denormal.
908738fd1498Szrj Similar logic is present in nonzero_address in rtlanal.h.
908838fd1498Szrj
908938fd1498Szrj If the return value is based on the assumption that signed overflow
909038fd1498Szrj is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
909138fd1498Szrj change *STRICT_OVERFLOW_P. */
909238fd1498Szrj
909338fd1498Szrj static bool
tree_expr_nonzero_warnv_p(tree t,bool * strict_overflow_p)909438fd1498Szrj tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
909538fd1498Szrj {
909638fd1498Szrj tree type = TREE_TYPE (t);
909738fd1498Szrj enum tree_code code;
909838fd1498Szrj
909938fd1498Szrj /* Doing something useful for floating point would need more work. */
910038fd1498Szrj if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
910138fd1498Szrj return false;
910238fd1498Szrj
910338fd1498Szrj code = TREE_CODE (t);
910438fd1498Szrj switch (TREE_CODE_CLASS (code))
910538fd1498Szrj {
910638fd1498Szrj case tcc_unary:
910738fd1498Szrj return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
910838fd1498Szrj strict_overflow_p);
910938fd1498Szrj case tcc_binary:
911038fd1498Szrj case tcc_comparison:
911138fd1498Szrj return tree_binary_nonzero_warnv_p (code, type,
911238fd1498Szrj TREE_OPERAND (t, 0),
911338fd1498Szrj TREE_OPERAND (t, 1),
911438fd1498Szrj strict_overflow_p);
911538fd1498Szrj case tcc_constant:
911638fd1498Szrj case tcc_declaration:
911738fd1498Szrj case tcc_reference:
911838fd1498Szrj return tree_single_nonzero_warnv_p (t, strict_overflow_p);
911938fd1498Szrj
912038fd1498Szrj default:
912138fd1498Szrj break;
912238fd1498Szrj }
912338fd1498Szrj
912438fd1498Szrj switch (code)
912538fd1498Szrj {
912638fd1498Szrj case TRUTH_NOT_EXPR:
912738fd1498Szrj return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
912838fd1498Szrj strict_overflow_p);
912938fd1498Szrj
913038fd1498Szrj case TRUTH_AND_EXPR:
913138fd1498Szrj case TRUTH_OR_EXPR:
913238fd1498Szrj case TRUTH_XOR_EXPR:
913338fd1498Szrj return tree_binary_nonzero_warnv_p (code, type,
913438fd1498Szrj TREE_OPERAND (t, 0),
913538fd1498Szrj TREE_OPERAND (t, 1),
913638fd1498Szrj strict_overflow_p);
913738fd1498Szrj
913838fd1498Szrj case COND_EXPR:
913938fd1498Szrj case CONSTRUCTOR:
914038fd1498Szrj case OBJ_TYPE_REF:
914138fd1498Szrj case ASSERT_EXPR:
914238fd1498Szrj case ADDR_EXPR:
914338fd1498Szrj case WITH_SIZE_EXPR:
914438fd1498Szrj case SSA_NAME:
914538fd1498Szrj return tree_single_nonzero_warnv_p (t, strict_overflow_p);
914638fd1498Szrj
914738fd1498Szrj case COMPOUND_EXPR:
914838fd1498Szrj case MODIFY_EXPR:
914938fd1498Szrj case BIND_EXPR:
915038fd1498Szrj return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
915138fd1498Szrj strict_overflow_p);
915238fd1498Szrj
915338fd1498Szrj case SAVE_EXPR:
915438fd1498Szrj return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
915538fd1498Szrj strict_overflow_p);
915638fd1498Szrj
915738fd1498Szrj case CALL_EXPR:
915838fd1498Szrj {
915938fd1498Szrj tree fndecl = get_callee_fndecl (t);
916038fd1498Szrj if (!fndecl) return false;
916138fd1498Szrj if (flag_delete_null_pointer_checks && !flag_check_new
916238fd1498Szrj && DECL_IS_OPERATOR_NEW (fndecl)
916338fd1498Szrj && !TREE_NOTHROW (fndecl))
916438fd1498Szrj return true;
916538fd1498Szrj if (flag_delete_null_pointer_checks
916638fd1498Szrj && lookup_attribute ("returns_nonnull",
916738fd1498Szrj TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
916838fd1498Szrj return true;
916938fd1498Szrj return alloca_call_p (t);
917038fd1498Szrj }
917138fd1498Szrj
917238fd1498Szrj default:
917338fd1498Szrj break;
917438fd1498Szrj }
917538fd1498Szrj return false;
917638fd1498Szrj }
917738fd1498Szrj
917838fd1498Szrj /* Return true when T is an address and is known to be nonzero.
917938fd1498Szrj Handle warnings about undefined signed overflow. */
918038fd1498Szrj
918138fd1498Szrj bool
tree_expr_nonzero_p(tree t)918238fd1498Szrj tree_expr_nonzero_p (tree t)
918338fd1498Szrj {
918438fd1498Szrj bool ret, strict_overflow_p;
918538fd1498Szrj
918638fd1498Szrj strict_overflow_p = false;
918738fd1498Szrj ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
918838fd1498Szrj if (strict_overflow_p)
918938fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur when "
919038fd1498Szrj "determining that expression is always "
919138fd1498Szrj "non-zero"),
919238fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
919338fd1498Szrj return ret;
919438fd1498Szrj }
919538fd1498Szrj
919638fd1498Szrj /* Return true if T is known not to be equal to an integer W. */
919738fd1498Szrj
919838fd1498Szrj bool
expr_not_equal_to(tree t,const wide_int & w)919938fd1498Szrj expr_not_equal_to (tree t, const wide_int &w)
920038fd1498Szrj {
920138fd1498Szrj wide_int min, max, nz;
920238fd1498Szrj value_range_type rtype;
920338fd1498Szrj switch (TREE_CODE (t))
920438fd1498Szrj {
920538fd1498Szrj case INTEGER_CST:
920638fd1498Szrj return wi::to_wide (t) != w;
920738fd1498Szrj
920838fd1498Szrj case SSA_NAME:
920938fd1498Szrj if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
921038fd1498Szrj return false;
921138fd1498Szrj rtype = get_range_info (t, &min, &max);
921238fd1498Szrj if (rtype == VR_RANGE)
921338fd1498Szrj {
921438fd1498Szrj if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
921538fd1498Szrj return true;
921638fd1498Szrj if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
921738fd1498Szrj return true;
921838fd1498Szrj }
921938fd1498Szrj else if (rtype == VR_ANTI_RANGE
922038fd1498Szrj && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
922138fd1498Szrj && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
922238fd1498Szrj return true;
922338fd1498Szrj /* If T has some known zero bits and W has any of those bits set,
922438fd1498Szrj then T is known not to be equal to W. */
922538fd1498Szrj if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
922638fd1498Szrj TYPE_PRECISION (TREE_TYPE (t))), 0))
922738fd1498Szrj return true;
922838fd1498Szrj return false;
922938fd1498Szrj
923038fd1498Szrj default:
923138fd1498Szrj return false;
923238fd1498Szrj }
923338fd1498Szrj }
923438fd1498Szrj
923538fd1498Szrj /* Fold a binary expression of code CODE and type TYPE with operands
923638fd1498Szrj OP0 and OP1. LOC is the location of the resulting expression.
923738fd1498Szrj Return the folded expression if folding is successful. Otherwise,
923838fd1498Szrj return NULL_TREE. */
923938fd1498Szrj
924038fd1498Szrj tree
fold_binary_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1)924138fd1498Szrj fold_binary_loc (location_t loc, enum tree_code code, tree type,
924238fd1498Szrj tree op0, tree op1)
924338fd1498Szrj {
924438fd1498Szrj enum tree_code_class kind = TREE_CODE_CLASS (code);
924538fd1498Szrj tree arg0, arg1, tem;
924638fd1498Szrj tree t1 = NULL_TREE;
924738fd1498Szrj bool strict_overflow_p;
924838fd1498Szrj unsigned int prec;
924938fd1498Szrj
925038fd1498Szrj gcc_assert (IS_EXPR_CODE_CLASS (kind)
925138fd1498Szrj && TREE_CODE_LENGTH (code) == 2
925238fd1498Szrj && op0 != NULL_TREE
925338fd1498Szrj && op1 != NULL_TREE);
925438fd1498Szrj
925538fd1498Szrj arg0 = op0;
925638fd1498Szrj arg1 = op1;
925738fd1498Szrj
925838fd1498Szrj /* Strip any conversions that don't change the mode. This is
925938fd1498Szrj safe for every expression, except for a comparison expression
926038fd1498Szrj because its signedness is derived from its operands. So, in
926138fd1498Szrj the latter case, only strip conversions that don't change the
926238fd1498Szrj signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
926338fd1498Szrj preserved.
926438fd1498Szrj
926538fd1498Szrj Note that this is done as an internal manipulation within the
926638fd1498Szrj constant folder, in order to find the simplest representation
926738fd1498Szrj of the arguments so that their form can be studied. In any
926838fd1498Szrj cases, the appropriate type conversions should be put back in
926938fd1498Szrj the tree that will get out of the constant folder. */
927038fd1498Szrj
927138fd1498Szrj if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
927238fd1498Szrj {
927338fd1498Szrj STRIP_SIGN_NOPS (arg0);
927438fd1498Szrj STRIP_SIGN_NOPS (arg1);
927538fd1498Szrj }
927638fd1498Szrj else
927738fd1498Szrj {
927838fd1498Szrj STRIP_NOPS (arg0);
927938fd1498Szrj STRIP_NOPS (arg1);
928038fd1498Szrj }
928138fd1498Szrj
928238fd1498Szrj /* Note that TREE_CONSTANT isn't enough: static var addresses are
928338fd1498Szrj constant but we can't do arithmetic on them. */
928438fd1498Szrj if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
928538fd1498Szrj {
928638fd1498Szrj tem = const_binop (code, type, arg0, arg1);
928738fd1498Szrj if (tem != NULL_TREE)
928838fd1498Szrj {
928938fd1498Szrj if (TREE_TYPE (tem) != type)
929038fd1498Szrj tem = fold_convert_loc (loc, type, tem);
929138fd1498Szrj return tem;
929238fd1498Szrj }
929338fd1498Szrj }
929438fd1498Szrj
929538fd1498Szrj /* If this is a commutative operation, and ARG0 is a constant, move it
929638fd1498Szrj to ARG1 to reduce the number of tests below. */
929738fd1498Szrj if (commutative_tree_code (code)
929838fd1498Szrj && tree_swap_operands_p (arg0, arg1))
929938fd1498Szrj return fold_build2_loc (loc, code, type, op1, op0);
930038fd1498Szrj
930138fd1498Szrj /* Likewise if this is a comparison, and ARG0 is a constant, move it
930238fd1498Szrj to ARG1 to reduce the number of tests below. */
930338fd1498Szrj if (kind == tcc_comparison
930438fd1498Szrj && tree_swap_operands_p (arg0, arg1))
930538fd1498Szrj return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
930638fd1498Szrj
930738fd1498Szrj tem = generic_simplify (loc, code, type, op0, op1);
930838fd1498Szrj if (tem)
930938fd1498Szrj return tem;
931038fd1498Szrj
931138fd1498Szrj /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
931238fd1498Szrj
931338fd1498Szrj First check for cases where an arithmetic operation is applied to a
931438fd1498Szrj compound, conditional, or comparison operation. Push the arithmetic
931538fd1498Szrj operation inside the compound or conditional to see if any folding
931638fd1498Szrj can then be done. Convert comparison to conditional for this purpose.
931738fd1498Szrj The also optimizes non-constant cases that used to be done in
931838fd1498Szrj expand_expr.
931938fd1498Szrj
932038fd1498Szrj Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
932138fd1498Szrj one of the operands is a comparison and the other is a comparison, a
932238fd1498Szrj BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
932338fd1498Szrj code below would make the expression more complex. Change it to a
932438fd1498Szrj TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
932538fd1498Szrj TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
932638fd1498Szrj
932738fd1498Szrj if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
932838fd1498Szrj || code == EQ_EXPR || code == NE_EXPR)
932938fd1498Szrj && !VECTOR_TYPE_P (TREE_TYPE (arg0))
933038fd1498Szrj && ((truth_value_p (TREE_CODE (arg0))
933138fd1498Szrj && (truth_value_p (TREE_CODE (arg1))
933238fd1498Szrj || (TREE_CODE (arg1) == BIT_AND_EXPR
933338fd1498Szrj && integer_onep (TREE_OPERAND (arg1, 1)))))
933438fd1498Szrj || (truth_value_p (TREE_CODE (arg1))
933538fd1498Szrj && (truth_value_p (TREE_CODE (arg0))
933638fd1498Szrj || (TREE_CODE (arg0) == BIT_AND_EXPR
933738fd1498Szrj && integer_onep (TREE_OPERAND (arg0, 1)))))))
933838fd1498Szrj {
933938fd1498Szrj tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
934038fd1498Szrj : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
934138fd1498Szrj : TRUTH_XOR_EXPR,
934238fd1498Szrj boolean_type_node,
934338fd1498Szrj fold_convert_loc (loc, boolean_type_node, arg0),
934438fd1498Szrj fold_convert_loc (loc, boolean_type_node, arg1));
934538fd1498Szrj
934638fd1498Szrj if (code == EQ_EXPR)
934738fd1498Szrj tem = invert_truthvalue_loc (loc, tem);
934838fd1498Szrj
934938fd1498Szrj return fold_convert_loc (loc, type, tem);
935038fd1498Szrj }
935138fd1498Szrj
935238fd1498Szrj if (TREE_CODE_CLASS (code) == tcc_binary
935338fd1498Szrj || TREE_CODE_CLASS (code) == tcc_comparison)
935438fd1498Szrj {
935538fd1498Szrj if (TREE_CODE (arg0) == COMPOUND_EXPR)
935638fd1498Szrj {
935738fd1498Szrj tem = fold_build2_loc (loc, code, type,
935838fd1498Szrj fold_convert_loc (loc, TREE_TYPE (op0),
935938fd1498Szrj TREE_OPERAND (arg0, 1)), op1);
936038fd1498Szrj return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
936138fd1498Szrj tem);
936238fd1498Szrj }
936338fd1498Szrj if (TREE_CODE (arg1) == COMPOUND_EXPR)
936438fd1498Szrj {
936538fd1498Szrj tem = fold_build2_loc (loc, code, type, op0,
936638fd1498Szrj fold_convert_loc (loc, TREE_TYPE (op1),
936738fd1498Szrj TREE_OPERAND (arg1, 1)));
936838fd1498Szrj return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
936938fd1498Szrj tem);
937038fd1498Szrj }
937138fd1498Szrj
937238fd1498Szrj if (TREE_CODE (arg0) == COND_EXPR
937338fd1498Szrj || TREE_CODE (arg0) == VEC_COND_EXPR
937438fd1498Szrj || COMPARISON_CLASS_P (arg0))
937538fd1498Szrj {
937638fd1498Szrj tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
937738fd1498Szrj arg0, arg1,
937838fd1498Szrj /*cond_first_p=*/1);
937938fd1498Szrj if (tem != NULL_TREE)
938038fd1498Szrj return tem;
938138fd1498Szrj }
938238fd1498Szrj
938338fd1498Szrj if (TREE_CODE (arg1) == COND_EXPR
938438fd1498Szrj || TREE_CODE (arg1) == VEC_COND_EXPR
938538fd1498Szrj || COMPARISON_CLASS_P (arg1))
938638fd1498Szrj {
938738fd1498Szrj tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
938838fd1498Szrj arg1, arg0,
938938fd1498Szrj /*cond_first_p=*/0);
939038fd1498Szrj if (tem != NULL_TREE)
939138fd1498Szrj return tem;
939238fd1498Szrj }
939338fd1498Szrj }
939438fd1498Szrj
939538fd1498Szrj switch (code)
939638fd1498Szrj {
939738fd1498Szrj case MEM_REF:
939838fd1498Szrj /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
939938fd1498Szrj if (TREE_CODE (arg0) == ADDR_EXPR
940038fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
940138fd1498Szrj {
940238fd1498Szrj tree iref = TREE_OPERAND (arg0, 0);
940338fd1498Szrj return fold_build2 (MEM_REF, type,
940438fd1498Szrj TREE_OPERAND (iref, 0),
940538fd1498Szrj int_const_binop (PLUS_EXPR, arg1,
940638fd1498Szrj TREE_OPERAND (iref, 1)));
940738fd1498Szrj }
940838fd1498Szrj
940938fd1498Szrj /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
941038fd1498Szrj if (TREE_CODE (arg0) == ADDR_EXPR
941138fd1498Szrj && handled_component_p (TREE_OPERAND (arg0, 0)))
941238fd1498Szrj {
941338fd1498Szrj tree base;
941438fd1498Szrj poly_int64 coffset;
941538fd1498Szrj base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
941638fd1498Szrj &coffset);
941738fd1498Szrj if (!base)
941838fd1498Szrj return NULL_TREE;
941938fd1498Szrj return fold_build2 (MEM_REF, type,
942038fd1498Szrj build_fold_addr_expr (base),
942138fd1498Szrj int_const_binop (PLUS_EXPR, arg1,
942238fd1498Szrj size_int (coffset)));
942338fd1498Szrj }
942438fd1498Szrj
942538fd1498Szrj return NULL_TREE;
942638fd1498Szrj
942738fd1498Szrj case POINTER_PLUS_EXPR:
942838fd1498Szrj /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
942938fd1498Szrj if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
943038fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
943138fd1498Szrj return fold_convert_loc (loc, type,
943238fd1498Szrj fold_build2_loc (loc, PLUS_EXPR, sizetype,
943338fd1498Szrj fold_convert_loc (loc, sizetype,
943438fd1498Szrj arg1),
943538fd1498Szrj fold_convert_loc (loc, sizetype,
943638fd1498Szrj arg0)));
943738fd1498Szrj
943838fd1498Szrj return NULL_TREE;
943938fd1498Szrj
944038fd1498Szrj case PLUS_EXPR:
944138fd1498Szrj if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
944238fd1498Szrj {
944338fd1498Szrj /* X + (X / CST) * -CST is X % CST. */
944438fd1498Szrj if (TREE_CODE (arg1) == MULT_EXPR
944538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
944638fd1498Szrj && operand_equal_p (arg0,
944738fd1498Szrj TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
944838fd1498Szrj {
944938fd1498Szrj tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
945038fd1498Szrj tree cst1 = TREE_OPERAND (arg1, 1);
945138fd1498Szrj tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
945238fd1498Szrj cst1, cst0);
945338fd1498Szrj if (sum && integer_zerop (sum))
945438fd1498Szrj return fold_convert_loc (loc, type,
945538fd1498Szrj fold_build2_loc (loc, TRUNC_MOD_EXPR,
945638fd1498Szrj TREE_TYPE (arg0), arg0,
945738fd1498Szrj cst0));
945838fd1498Szrj }
945938fd1498Szrj }
946038fd1498Szrj
946138fd1498Szrj /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
946238fd1498Szrj one. Make sure the type is not saturating and has the signedness of
946338fd1498Szrj the stripped operands, as fold_plusminus_mult_expr will re-associate.
946438fd1498Szrj ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
946538fd1498Szrj if ((TREE_CODE (arg0) == MULT_EXPR
946638fd1498Szrj || TREE_CODE (arg1) == MULT_EXPR)
946738fd1498Szrj && !TYPE_SATURATING (type)
946838fd1498Szrj && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
946938fd1498Szrj && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
947038fd1498Szrj && (!FLOAT_TYPE_P (type) || flag_associative_math))
947138fd1498Szrj {
947238fd1498Szrj tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
947338fd1498Szrj if (tem)
947438fd1498Szrj return tem;
947538fd1498Szrj }
947638fd1498Szrj
947738fd1498Szrj if (! FLOAT_TYPE_P (type))
947838fd1498Szrj {
947938fd1498Szrj /* Reassociate (plus (plus (mult) (foo)) (mult)) as
948038fd1498Szrj (plus (plus (mult) (mult)) (foo)) so that we can
948138fd1498Szrj take advantage of the factoring cases below. */
948238fd1498Szrj if (ANY_INTEGRAL_TYPE_P (type)
948338fd1498Szrj && TYPE_OVERFLOW_WRAPS (type)
948438fd1498Szrj && (((TREE_CODE (arg0) == PLUS_EXPR
948538fd1498Szrj || TREE_CODE (arg0) == MINUS_EXPR)
948638fd1498Szrj && TREE_CODE (arg1) == MULT_EXPR)
948738fd1498Szrj || ((TREE_CODE (arg1) == PLUS_EXPR
948838fd1498Szrj || TREE_CODE (arg1) == MINUS_EXPR)
948938fd1498Szrj && TREE_CODE (arg0) == MULT_EXPR)))
949038fd1498Szrj {
949138fd1498Szrj tree parg0, parg1, parg, marg;
949238fd1498Szrj enum tree_code pcode;
949338fd1498Szrj
949438fd1498Szrj if (TREE_CODE (arg1) == MULT_EXPR)
949538fd1498Szrj parg = arg0, marg = arg1;
949638fd1498Szrj else
949738fd1498Szrj parg = arg1, marg = arg0;
949838fd1498Szrj pcode = TREE_CODE (parg);
949938fd1498Szrj parg0 = TREE_OPERAND (parg, 0);
950038fd1498Szrj parg1 = TREE_OPERAND (parg, 1);
950138fd1498Szrj STRIP_NOPS (parg0);
950238fd1498Szrj STRIP_NOPS (parg1);
950338fd1498Szrj
950438fd1498Szrj if (TREE_CODE (parg0) == MULT_EXPR
950538fd1498Szrj && TREE_CODE (parg1) != MULT_EXPR)
950638fd1498Szrj return fold_build2_loc (loc, pcode, type,
950738fd1498Szrj fold_build2_loc (loc, PLUS_EXPR, type,
950838fd1498Szrj fold_convert_loc (loc, type,
950938fd1498Szrj parg0),
951038fd1498Szrj fold_convert_loc (loc, type,
951138fd1498Szrj marg)),
951238fd1498Szrj fold_convert_loc (loc, type, parg1));
951338fd1498Szrj if (TREE_CODE (parg0) != MULT_EXPR
951438fd1498Szrj && TREE_CODE (parg1) == MULT_EXPR)
951538fd1498Szrj return
951638fd1498Szrj fold_build2_loc (loc, PLUS_EXPR, type,
951738fd1498Szrj fold_convert_loc (loc, type, parg0),
951838fd1498Szrj fold_build2_loc (loc, pcode, type,
951938fd1498Szrj fold_convert_loc (loc, type, marg),
952038fd1498Szrj fold_convert_loc (loc, type,
952138fd1498Szrj parg1)));
952238fd1498Szrj }
952338fd1498Szrj }
952438fd1498Szrj else
952538fd1498Szrj {
952638fd1498Szrj /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
952738fd1498Szrj to __complex__ ( x, y ). This is not the same for SNaNs or
952838fd1498Szrj if signed zeros are involved. */
952938fd1498Szrj if (!HONOR_SNANS (element_mode (arg0))
953038fd1498Szrj && !HONOR_SIGNED_ZEROS (element_mode (arg0))
953138fd1498Szrj && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
953238fd1498Szrj {
953338fd1498Szrj tree rtype = TREE_TYPE (TREE_TYPE (arg0));
953438fd1498Szrj tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
953538fd1498Szrj tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
953638fd1498Szrj bool arg0rz = false, arg0iz = false;
953738fd1498Szrj if ((arg0r && (arg0rz = real_zerop (arg0r)))
953838fd1498Szrj || (arg0i && (arg0iz = real_zerop (arg0i))))
953938fd1498Szrj {
954038fd1498Szrj tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
954138fd1498Szrj tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
954238fd1498Szrj if (arg0rz && arg1i && real_zerop (arg1i))
954338fd1498Szrj {
954438fd1498Szrj tree rp = arg1r ? arg1r
954538fd1498Szrj : build1 (REALPART_EXPR, rtype, arg1);
954638fd1498Szrj tree ip = arg0i ? arg0i
954738fd1498Szrj : build1 (IMAGPART_EXPR, rtype, arg0);
954838fd1498Szrj return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
954938fd1498Szrj }
955038fd1498Szrj else if (arg0iz && arg1r && real_zerop (arg1r))
955138fd1498Szrj {
955238fd1498Szrj tree rp = arg0r ? arg0r
955338fd1498Szrj : build1 (REALPART_EXPR, rtype, arg0);
955438fd1498Szrj tree ip = arg1i ? arg1i
955538fd1498Szrj : build1 (IMAGPART_EXPR, rtype, arg1);
955638fd1498Szrj return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
955738fd1498Szrj }
955838fd1498Szrj }
955938fd1498Szrj }
956038fd1498Szrj
956138fd1498Szrj /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
956238fd1498Szrj We associate floats only if the user has specified
956338fd1498Szrj -fassociative-math. */
956438fd1498Szrj if (flag_associative_math
956538fd1498Szrj && TREE_CODE (arg1) == PLUS_EXPR
956638fd1498Szrj && TREE_CODE (arg0) != MULT_EXPR)
956738fd1498Szrj {
956838fd1498Szrj tree tree10 = TREE_OPERAND (arg1, 0);
956938fd1498Szrj tree tree11 = TREE_OPERAND (arg1, 1);
957038fd1498Szrj if (TREE_CODE (tree11) == MULT_EXPR
957138fd1498Szrj && TREE_CODE (tree10) == MULT_EXPR)
957238fd1498Szrj {
957338fd1498Szrj tree tree0;
957438fd1498Szrj tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
957538fd1498Szrj return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
957638fd1498Szrj }
957738fd1498Szrj }
957838fd1498Szrj /* Convert (b*c + d*e) + a into b*c + (d*e +a).
957938fd1498Szrj We associate floats only if the user has specified
958038fd1498Szrj -fassociative-math. */
958138fd1498Szrj if (flag_associative_math
958238fd1498Szrj && TREE_CODE (arg0) == PLUS_EXPR
958338fd1498Szrj && TREE_CODE (arg1) != MULT_EXPR)
958438fd1498Szrj {
958538fd1498Szrj tree tree00 = TREE_OPERAND (arg0, 0);
958638fd1498Szrj tree tree01 = TREE_OPERAND (arg0, 1);
958738fd1498Szrj if (TREE_CODE (tree01) == MULT_EXPR
958838fd1498Szrj && TREE_CODE (tree00) == MULT_EXPR)
958938fd1498Szrj {
959038fd1498Szrj tree tree0;
959138fd1498Szrj tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
959238fd1498Szrj return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
959338fd1498Szrj }
959438fd1498Szrj }
959538fd1498Szrj }
959638fd1498Szrj
959738fd1498Szrj bit_rotate:
959838fd1498Szrj /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
959938fd1498Szrj is a rotate of A by C1 bits. */
960038fd1498Szrj /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
960138fd1498Szrj is a rotate of A by B bits.
960238fd1498Szrj Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
960338fd1498Szrj though in this case CODE must be | and not + or ^, otherwise
960438fd1498Szrj it doesn't return A when B is 0. */
960538fd1498Szrj {
960638fd1498Szrj enum tree_code code0, code1;
960738fd1498Szrj tree rtype;
960838fd1498Szrj code0 = TREE_CODE (arg0);
960938fd1498Szrj code1 = TREE_CODE (arg1);
961038fd1498Szrj if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
961138fd1498Szrj || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
961238fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0),
961338fd1498Szrj TREE_OPERAND (arg1, 0), 0)
961438fd1498Szrj && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
961538fd1498Szrj TYPE_UNSIGNED (rtype))
961638fd1498Szrj /* Only create rotates in complete modes. Other cases are not
961738fd1498Szrj expanded properly. */
961838fd1498Szrj && (element_precision (rtype)
961938fd1498Szrj == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
962038fd1498Szrj {
962138fd1498Szrj tree tree01, tree11;
962238fd1498Szrj tree orig_tree01, orig_tree11;
962338fd1498Szrj enum tree_code code01, code11;
962438fd1498Szrj
962538fd1498Szrj tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
962638fd1498Szrj tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
962738fd1498Szrj STRIP_NOPS (tree01);
962838fd1498Szrj STRIP_NOPS (tree11);
962938fd1498Szrj code01 = TREE_CODE (tree01);
963038fd1498Szrj code11 = TREE_CODE (tree11);
963138fd1498Szrj if (code11 != MINUS_EXPR
963238fd1498Szrj && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
963338fd1498Szrj {
963438fd1498Szrj std::swap (code0, code1);
963538fd1498Szrj std::swap (code01, code11);
963638fd1498Szrj std::swap (tree01, tree11);
963738fd1498Szrj std::swap (orig_tree01, orig_tree11);
963838fd1498Szrj }
963938fd1498Szrj if (code01 == INTEGER_CST
964038fd1498Szrj && code11 == INTEGER_CST
964138fd1498Szrj && (wi::to_widest (tree01) + wi::to_widest (tree11)
964238fd1498Szrj == element_precision (rtype)))
964338fd1498Szrj {
964438fd1498Szrj tem = build2_loc (loc, LROTATE_EXPR,
964538fd1498Szrj rtype, TREE_OPERAND (arg0, 0),
964638fd1498Szrj code0 == LSHIFT_EXPR
964738fd1498Szrj ? orig_tree01 : orig_tree11);
964838fd1498Szrj return fold_convert_loc (loc, type, tem);
964938fd1498Szrj }
965038fd1498Szrj else if (code11 == MINUS_EXPR)
965138fd1498Szrj {
965238fd1498Szrj tree tree110, tree111;
965338fd1498Szrj tree110 = TREE_OPERAND (tree11, 0);
965438fd1498Szrj tree111 = TREE_OPERAND (tree11, 1);
965538fd1498Szrj STRIP_NOPS (tree110);
965638fd1498Szrj STRIP_NOPS (tree111);
965738fd1498Szrj if (TREE_CODE (tree110) == INTEGER_CST
965838fd1498Szrj && compare_tree_int (tree110,
965938fd1498Szrj element_precision (rtype)) == 0
966038fd1498Szrj && operand_equal_p (tree01, tree111, 0))
966138fd1498Szrj {
966238fd1498Szrj tem = build2_loc (loc, (code0 == LSHIFT_EXPR
966338fd1498Szrj ? LROTATE_EXPR : RROTATE_EXPR),
966438fd1498Szrj rtype, TREE_OPERAND (arg0, 0),
966538fd1498Szrj orig_tree01);
966638fd1498Szrj return fold_convert_loc (loc, type, tem);
966738fd1498Szrj }
966838fd1498Szrj }
966938fd1498Szrj else if (code == BIT_IOR_EXPR
967038fd1498Szrj && code11 == BIT_AND_EXPR
967138fd1498Szrj && pow2p_hwi (element_precision (rtype)))
967238fd1498Szrj {
967338fd1498Szrj tree tree110, tree111;
967438fd1498Szrj tree110 = TREE_OPERAND (tree11, 0);
967538fd1498Szrj tree111 = TREE_OPERAND (tree11, 1);
967638fd1498Szrj STRIP_NOPS (tree110);
967738fd1498Szrj STRIP_NOPS (tree111);
967838fd1498Szrj if (TREE_CODE (tree110) == NEGATE_EXPR
967938fd1498Szrj && TREE_CODE (tree111) == INTEGER_CST
968038fd1498Szrj && compare_tree_int (tree111,
968138fd1498Szrj element_precision (rtype) - 1) == 0
968238fd1498Szrj && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
968338fd1498Szrj {
968438fd1498Szrj tem = build2_loc (loc, (code0 == LSHIFT_EXPR
968538fd1498Szrj ? LROTATE_EXPR : RROTATE_EXPR),
968638fd1498Szrj rtype, TREE_OPERAND (arg0, 0),
968738fd1498Szrj orig_tree01);
968838fd1498Szrj return fold_convert_loc (loc, type, tem);
968938fd1498Szrj }
969038fd1498Szrj }
969138fd1498Szrj }
969238fd1498Szrj }
969338fd1498Szrj
969438fd1498Szrj associate:
969538fd1498Szrj /* In most languages, can't associate operations on floats through
969638fd1498Szrj parentheses. Rather than remember where the parentheses were, we
969738fd1498Szrj don't associate floats at all, unless the user has specified
969838fd1498Szrj -fassociative-math.
969938fd1498Szrj And, we need to make sure type is not saturating. */
970038fd1498Szrj
970138fd1498Szrj if ((! FLOAT_TYPE_P (type) || flag_associative_math)
970238fd1498Szrj && !TYPE_SATURATING (type))
970338fd1498Szrj {
970438fd1498Szrj tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
970538fd1498Szrj tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
970638fd1498Szrj tree atype = type;
970738fd1498Szrj bool ok = true;
970838fd1498Szrj
970938fd1498Szrj /* Split both trees into variables, constants, and literals. Then
971038fd1498Szrj associate each group together, the constants with literals,
971138fd1498Szrj then the result with variables. This increases the chances of
971238fd1498Szrj literals being recombined later and of generating relocatable
971338fd1498Szrj expressions for the sum of a constant and literal. */
971438fd1498Szrj var0 = split_tree (arg0, type, code,
971538fd1498Szrj &minus_var0, &con0, &minus_con0,
971638fd1498Szrj &lit0, &minus_lit0, 0);
971738fd1498Szrj var1 = split_tree (arg1, type, code,
971838fd1498Szrj &minus_var1, &con1, &minus_con1,
971938fd1498Szrj &lit1, &minus_lit1, code == MINUS_EXPR);
972038fd1498Szrj
972138fd1498Szrj /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
972238fd1498Szrj if (code == MINUS_EXPR)
972338fd1498Szrj code = PLUS_EXPR;
972438fd1498Szrj
972538fd1498Szrj /* With undefined overflow prefer doing association in a type
972638fd1498Szrj which wraps on overflow, if that is one of the operand types. */
972738fd1498Szrj if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
972838fd1498Szrj && !TYPE_OVERFLOW_WRAPS (type))
972938fd1498Szrj {
973038fd1498Szrj if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
973138fd1498Szrj && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
973238fd1498Szrj atype = TREE_TYPE (arg0);
973338fd1498Szrj else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
973438fd1498Szrj && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
973538fd1498Szrj atype = TREE_TYPE (arg1);
973638fd1498Szrj gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
973738fd1498Szrj }
973838fd1498Szrj
973938fd1498Szrj /* With undefined overflow we can only associate constants with one
974038fd1498Szrj variable, and constants whose association doesn't overflow. */
974138fd1498Szrj if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
974238fd1498Szrj && !TYPE_OVERFLOW_WRAPS (atype))
974338fd1498Szrj {
974438fd1498Szrj if ((var0 && var1) || (minus_var0 && minus_var1))
974538fd1498Szrj {
974638fd1498Szrj /* ??? If split_tree would handle NEGATE_EXPR we could
974738fd1498Szrj simply reject these cases and the allowed cases would
974838fd1498Szrj be the var0/minus_var1 ones. */
974938fd1498Szrj tree tmp0 = var0 ? var0 : minus_var0;
975038fd1498Szrj tree tmp1 = var1 ? var1 : minus_var1;
975138fd1498Szrj bool one_neg = false;
975238fd1498Szrj
975338fd1498Szrj if (TREE_CODE (tmp0) == NEGATE_EXPR)
975438fd1498Szrj {
975538fd1498Szrj tmp0 = TREE_OPERAND (tmp0, 0);
975638fd1498Szrj one_neg = !one_neg;
975738fd1498Szrj }
975838fd1498Szrj if (CONVERT_EXPR_P (tmp0)
975938fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
976038fd1498Szrj && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
976138fd1498Szrj <= TYPE_PRECISION (atype)))
976238fd1498Szrj tmp0 = TREE_OPERAND (tmp0, 0);
976338fd1498Szrj if (TREE_CODE (tmp1) == NEGATE_EXPR)
976438fd1498Szrj {
976538fd1498Szrj tmp1 = TREE_OPERAND (tmp1, 0);
976638fd1498Szrj one_neg = !one_neg;
976738fd1498Szrj }
976838fd1498Szrj if (CONVERT_EXPR_P (tmp1)
976938fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
977038fd1498Szrj && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
977138fd1498Szrj <= TYPE_PRECISION (atype)))
977238fd1498Szrj tmp1 = TREE_OPERAND (tmp1, 0);
977338fd1498Szrj /* The only case we can still associate with two variables
977438fd1498Szrj is if they cancel out. */
977538fd1498Szrj if (!one_neg
977638fd1498Szrj || !operand_equal_p (tmp0, tmp1, 0))
977738fd1498Szrj ok = false;
977838fd1498Szrj }
977938fd1498Szrj else if ((var0 && minus_var1
978038fd1498Szrj && ! operand_equal_p (var0, minus_var1, 0))
978138fd1498Szrj || (minus_var0 && var1
978238fd1498Szrj && ! operand_equal_p (minus_var0, var1, 0)))
978338fd1498Szrj ok = false;
978438fd1498Szrj }
978538fd1498Szrj
978638fd1498Szrj /* Only do something if we found more than two objects. Otherwise,
978738fd1498Szrj nothing has changed and we risk infinite recursion. */
978838fd1498Szrj if (ok
978938fd1498Szrj && ((var0 != 0) + (var1 != 0)
979038fd1498Szrj + (minus_var0 != 0) + (minus_var1 != 0)
979138fd1498Szrj + (con0 != 0) + (con1 != 0)
979238fd1498Szrj + (minus_con0 != 0) + (minus_con1 != 0)
979338fd1498Szrj + (lit0 != 0) + (lit1 != 0)
979438fd1498Szrj + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
979538fd1498Szrj {
979638fd1498Szrj var0 = associate_trees (loc, var0, var1, code, atype);
979738fd1498Szrj minus_var0 = associate_trees (loc, minus_var0, minus_var1,
979838fd1498Szrj code, atype);
979938fd1498Szrj con0 = associate_trees (loc, con0, con1, code, atype);
980038fd1498Szrj minus_con0 = associate_trees (loc, minus_con0, minus_con1,
980138fd1498Szrj code, atype);
980238fd1498Szrj lit0 = associate_trees (loc, lit0, lit1, code, atype);
980338fd1498Szrj minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
980438fd1498Szrj code, atype);
980538fd1498Szrj
980638fd1498Szrj if (minus_var0 && var0)
980738fd1498Szrj {
980838fd1498Szrj var0 = associate_trees (loc, var0, minus_var0,
980938fd1498Szrj MINUS_EXPR, atype);
981038fd1498Szrj minus_var0 = 0;
981138fd1498Szrj }
981238fd1498Szrj if (minus_con0 && con0)
981338fd1498Szrj {
981438fd1498Szrj con0 = associate_trees (loc, con0, minus_con0,
981538fd1498Szrj MINUS_EXPR, atype);
981638fd1498Szrj minus_con0 = 0;
981738fd1498Szrj }
981838fd1498Szrj
981938fd1498Szrj /* Preserve the MINUS_EXPR if the negative part of the literal is
982038fd1498Szrj greater than the positive part. Otherwise, the multiplicative
982138fd1498Szrj folding code (i.e extract_muldiv) may be fooled in case
982238fd1498Szrj unsigned constants are subtracted, like in the following
982338fd1498Szrj example: ((X*2 + 4) - 8U)/2. */
982438fd1498Szrj if (minus_lit0 && lit0)
982538fd1498Szrj {
982638fd1498Szrj if (TREE_CODE (lit0) == INTEGER_CST
982738fd1498Szrj && TREE_CODE (minus_lit0) == INTEGER_CST
982838fd1498Szrj && tree_int_cst_lt (lit0, minus_lit0)
982938fd1498Szrj /* But avoid ending up with only negated parts. */
983038fd1498Szrj && (var0 || con0))
983138fd1498Szrj {
983238fd1498Szrj minus_lit0 = associate_trees (loc, minus_lit0, lit0,
983338fd1498Szrj MINUS_EXPR, atype);
983438fd1498Szrj lit0 = 0;
983538fd1498Szrj }
983638fd1498Szrj else
983738fd1498Szrj {
983838fd1498Szrj lit0 = associate_trees (loc, lit0, minus_lit0,
983938fd1498Szrj MINUS_EXPR, atype);
984038fd1498Szrj minus_lit0 = 0;
984138fd1498Szrj }
984238fd1498Szrj }
984338fd1498Szrj
984438fd1498Szrj /* Don't introduce overflows through reassociation. */
984538fd1498Szrj if ((lit0 && TREE_OVERFLOW_P (lit0))
984638fd1498Szrj || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
984738fd1498Szrj return NULL_TREE;
984838fd1498Szrj
984938fd1498Szrj /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
985038fd1498Szrj con0 = associate_trees (loc, con0, lit0, code, atype);
985138fd1498Szrj lit0 = 0;
985238fd1498Szrj minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
985338fd1498Szrj code, atype);
985438fd1498Szrj minus_lit0 = 0;
985538fd1498Szrj
985638fd1498Szrj /* Eliminate minus_con0. */
985738fd1498Szrj if (minus_con0)
985838fd1498Szrj {
985938fd1498Szrj if (con0)
986038fd1498Szrj con0 = associate_trees (loc, con0, minus_con0,
986138fd1498Szrj MINUS_EXPR, atype);
986238fd1498Szrj else if (var0)
986338fd1498Szrj var0 = associate_trees (loc, var0, minus_con0,
986438fd1498Szrj MINUS_EXPR, atype);
986538fd1498Szrj else
986638fd1498Szrj gcc_unreachable ();
986738fd1498Szrj minus_con0 = 0;
986838fd1498Szrj }
986938fd1498Szrj
987038fd1498Szrj /* Eliminate minus_var0. */
987138fd1498Szrj if (minus_var0)
987238fd1498Szrj {
987338fd1498Szrj if (con0)
987438fd1498Szrj con0 = associate_trees (loc, con0, minus_var0,
987538fd1498Szrj MINUS_EXPR, atype);
987638fd1498Szrj else
987738fd1498Szrj gcc_unreachable ();
987838fd1498Szrj minus_var0 = 0;
987938fd1498Szrj }
988038fd1498Szrj
988138fd1498Szrj return
988238fd1498Szrj fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
988338fd1498Szrj code, atype));
988438fd1498Szrj }
988538fd1498Szrj }
988638fd1498Szrj
988738fd1498Szrj return NULL_TREE;
988838fd1498Szrj
988938fd1498Szrj case POINTER_DIFF_EXPR:
989038fd1498Szrj case MINUS_EXPR:
989138fd1498Szrj /* Fold &a[i] - &a[j] to i-j. */
989238fd1498Szrj if (TREE_CODE (arg0) == ADDR_EXPR
989338fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
989438fd1498Szrj && TREE_CODE (arg1) == ADDR_EXPR
989538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
989638fd1498Szrj {
989738fd1498Szrj tree tem = fold_addr_of_array_ref_difference (loc, type,
989838fd1498Szrj TREE_OPERAND (arg0, 0),
989938fd1498Szrj TREE_OPERAND (arg1, 0),
990038fd1498Szrj code
990138fd1498Szrj == POINTER_DIFF_EXPR);
990238fd1498Szrj if (tem)
990338fd1498Szrj return tem;
990438fd1498Szrj }
990538fd1498Szrj
990638fd1498Szrj /* Further transformations are not for pointers. */
990738fd1498Szrj if (code == POINTER_DIFF_EXPR)
990838fd1498Szrj return NULL_TREE;
990938fd1498Szrj
991038fd1498Szrj /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
991138fd1498Szrj if (TREE_CODE (arg0) == NEGATE_EXPR
991238fd1498Szrj && negate_expr_p (op1)
991338fd1498Szrj /* If arg0 is e.g. unsigned int and type is int, then this could
991438fd1498Szrj introduce UB, because if A is INT_MIN at runtime, the original
991538fd1498Szrj expression can be well defined while the latter is not.
991638fd1498Szrj See PR83269. */
991738fd1498Szrj && !(ANY_INTEGRAL_TYPE_P (type)
991838fd1498Szrj && TYPE_OVERFLOW_UNDEFINED (type)
991938fd1498Szrj && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
992038fd1498Szrj && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
992138fd1498Szrj return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
992238fd1498Szrj fold_convert_loc (loc, type,
992338fd1498Szrj TREE_OPERAND (arg0, 0)));
992438fd1498Szrj
992538fd1498Szrj /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
992638fd1498Szrj __complex__ ( x, -y ). This is not the same for SNaNs or if
992738fd1498Szrj signed zeros are involved. */
992838fd1498Szrj if (!HONOR_SNANS (element_mode (arg0))
992938fd1498Szrj && !HONOR_SIGNED_ZEROS (element_mode (arg0))
993038fd1498Szrj && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
993138fd1498Szrj {
993238fd1498Szrj tree rtype = TREE_TYPE (TREE_TYPE (arg0));
993338fd1498Szrj tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
993438fd1498Szrj tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
993538fd1498Szrj bool arg0rz = false, arg0iz = false;
993638fd1498Szrj if ((arg0r && (arg0rz = real_zerop (arg0r)))
993738fd1498Szrj || (arg0i && (arg0iz = real_zerop (arg0i))))
993838fd1498Szrj {
993938fd1498Szrj tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
994038fd1498Szrj tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
994138fd1498Szrj if (arg0rz && arg1i && real_zerop (arg1i))
994238fd1498Szrj {
994338fd1498Szrj tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
994438fd1498Szrj arg1r ? arg1r
994538fd1498Szrj : build1 (REALPART_EXPR, rtype, arg1));
994638fd1498Szrj tree ip = arg0i ? arg0i
994738fd1498Szrj : build1 (IMAGPART_EXPR, rtype, arg0);
994838fd1498Szrj return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
994938fd1498Szrj }
995038fd1498Szrj else if (arg0iz && arg1r && real_zerop (arg1r))
995138fd1498Szrj {
995238fd1498Szrj tree rp = arg0r ? arg0r
995338fd1498Szrj : build1 (REALPART_EXPR, rtype, arg0);
995438fd1498Szrj tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
995538fd1498Szrj arg1i ? arg1i
995638fd1498Szrj : build1 (IMAGPART_EXPR, rtype, arg1));
995738fd1498Szrj return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
995838fd1498Szrj }
995938fd1498Szrj }
996038fd1498Szrj }
996138fd1498Szrj
996238fd1498Szrj /* A - B -> A + (-B) if B is easily negatable. */
996338fd1498Szrj if (negate_expr_p (op1)
996438fd1498Szrj && ! TYPE_OVERFLOW_SANITIZED (type)
996538fd1498Szrj && ((FLOAT_TYPE_P (type)
996638fd1498Szrj /* Avoid this transformation if B is a positive REAL_CST. */
996738fd1498Szrj && (TREE_CODE (op1) != REAL_CST
996838fd1498Szrj || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
996938fd1498Szrj || INTEGRAL_TYPE_P (type)))
997038fd1498Szrj return fold_build2_loc (loc, PLUS_EXPR, type,
997138fd1498Szrj fold_convert_loc (loc, type, arg0),
997238fd1498Szrj negate_expr (op1));
997338fd1498Szrj
997438fd1498Szrj /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
997538fd1498Szrj one. Make sure the type is not saturating and has the signedness of
997638fd1498Szrj the stripped operands, as fold_plusminus_mult_expr will re-associate.
997738fd1498Szrj ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
997838fd1498Szrj if ((TREE_CODE (arg0) == MULT_EXPR
997938fd1498Szrj || TREE_CODE (arg1) == MULT_EXPR)
998038fd1498Szrj && !TYPE_SATURATING (type)
998138fd1498Szrj && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
998238fd1498Szrj && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
998338fd1498Szrj && (!FLOAT_TYPE_P (type) || flag_associative_math))
998438fd1498Szrj {
998538fd1498Szrj tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
998638fd1498Szrj if (tem)
998738fd1498Szrj return tem;
998838fd1498Szrj }
998938fd1498Szrj
999038fd1498Szrj goto associate;
999138fd1498Szrj
999238fd1498Szrj case MULT_EXPR:
999338fd1498Szrj if (! FLOAT_TYPE_P (type))
999438fd1498Szrj {
999538fd1498Szrj /* Transform x * -C into -x * C if x is easily negatable. */
999638fd1498Szrj if (TREE_CODE (op1) == INTEGER_CST
999738fd1498Szrj && tree_int_cst_sgn (op1) == -1
999838fd1498Szrj && negate_expr_p (op0)
999938fd1498Szrj && negate_expr_p (op1)
1000038fd1498Szrj && (tem = negate_expr (op1)) != op1
1000138fd1498Szrj && ! TREE_OVERFLOW (tem))
1000238fd1498Szrj return fold_build2_loc (loc, MULT_EXPR, type,
1000338fd1498Szrj fold_convert_loc (loc, type,
1000438fd1498Szrj negate_expr (op0)), tem);
1000538fd1498Szrj
1000638fd1498Szrj strict_overflow_p = false;
1000738fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST
1000838fd1498Szrj && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
1000938fd1498Szrj &strict_overflow_p)) != 0)
1001038fd1498Szrj {
1001138fd1498Szrj if (strict_overflow_p)
1001238fd1498Szrj fold_overflow_warning (("assuming signed overflow does not "
1001338fd1498Szrj "occur when simplifying "
1001438fd1498Szrj "multiplication"),
1001538fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1001638fd1498Szrj return fold_convert_loc (loc, type, tem);
1001738fd1498Szrj }
1001838fd1498Szrj
1001938fd1498Szrj /* Optimize z * conj(z) for integer complex numbers. */
1002038fd1498Szrj if (TREE_CODE (arg0) == CONJ_EXPR
1002138fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
1002238fd1498Szrj return fold_mult_zconjz (loc, type, arg1);
1002338fd1498Szrj if (TREE_CODE (arg1) == CONJ_EXPR
1002438fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
1002538fd1498Szrj return fold_mult_zconjz (loc, type, arg0);
1002638fd1498Szrj }
1002738fd1498Szrj else
1002838fd1498Szrj {
1002938fd1498Szrj /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
1003038fd1498Szrj This is not the same for NaNs or if signed zeros are
1003138fd1498Szrj involved. */
1003238fd1498Szrj if (!HONOR_NANS (arg0)
1003338fd1498Szrj && !HONOR_SIGNED_ZEROS (element_mode (arg0))
1003438fd1498Szrj && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
1003538fd1498Szrj && TREE_CODE (arg1) == COMPLEX_CST
1003638fd1498Szrj && real_zerop (TREE_REALPART (arg1)))
1003738fd1498Szrj {
1003838fd1498Szrj tree rtype = TREE_TYPE (TREE_TYPE (arg0));
1003938fd1498Szrj if (real_onep (TREE_IMAGPART (arg1)))
1004038fd1498Szrj return
1004138fd1498Szrj fold_build2_loc (loc, COMPLEX_EXPR, type,
1004238fd1498Szrj negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
1004338fd1498Szrj rtype, arg0)),
1004438fd1498Szrj fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
1004538fd1498Szrj else if (real_minus_onep (TREE_IMAGPART (arg1)))
1004638fd1498Szrj return
1004738fd1498Szrj fold_build2_loc (loc, COMPLEX_EXPR, type,
1004838fd1498Szrj fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
1004938fd1498Szrj negate_expr (fold_build1_loc (loc, REALPART_EXPR,
1005038fd1498Szrj rtype, arg0)));
1005138fd1498Szrj }
1005238fd1498Szrj
1005338fd1498Szrj /* Optimize z * conj(z) for floating point complex numbers.
1005438fd1498Szrj Guarded by flag_unsafe_math_optimizations as non-finite
1005538fd1498Szrj imaginary components don't produce scalar results. */
1005638fd1498Szrj if (flag_unsafe_math_optimizations
1005738fd1498Szrj && TREE_CODE (arg0) == CONJ_EXPR
1005838fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
1005938fd1498Szrj return fold_mult_zconjz (loc, type, arg1);
1006038fd1498Szrj if (flag_unsafe_math_optimizations
1006138fd1498Szrj && TREE_CODE (arg1) == CONJ_EXPR
1006238fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
1006338fd1498Szrj return fold_mult_zconjz (loc, type, arg0);
1006438fd1498Szrj }
1006538fd1498Szrj goto associate;
1006638fd1498Szrj
1006738fd1498Szrj case BIT_IOR_EXPR:
1006838fd1498Szrj /* Canonicalize (X & C1) | C2. */
1006938fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1007038fd1498Szrj && TREE_CODE (arg1) == INTEGER_CST
1007138fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
1007238fd1498Szrj {
1007338fd1498Szrj int width = TYPE_PRECISION (type), w;
1007438fd1498Szrj wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
1007538fd1498Szrj wide_int c2 = wi::to_wide (arg1);
1007638fd1498Szrj
1007738fd1498Szrj /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
1007838fd1498Szrj if ((c1 & c2) == c1)
1007938fd1498Szrj return omit_one_operand_loc (loc, type, arg1,
1008038fd1498Szrj TREE_OPERAND (arg0, 0));
1008138fd1498Szrj
1008238fd1498Szrj wide_int msk = wi::mask (width, false,
1008338fd1498Szrj TYPE_PRECISION (TREE_TYPE (arg1)));
1008438fd1498Szrj
1008538fd1498Szrj /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
1008638fd1498Szrj if (wi::bit_and_not (msk, c1 | c2) == 0)
1008738fd1498Szrj {
1008838fd1498Szrj tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1008938fd1498Szrj return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
1009038fd1498Szrj }
1009138fd1498Szrj
1009238fd1498Szrj /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
1009338fd1498Szrj unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
1009438fd1498Szrj mode which allows further optimizations. */
1009538fd1498Szrj c1 &= msk;
1009638fd1498Szrj c2 &= msk;
1009738fd1498Szrj wide_int c3 = wi::bit_and_not (c1, c2);
1009838fd1498Szrj for (w = BITS_PER_UNIT; w <= width; w <<= 1)
1009938fd1498Szrj {
1010038fd1498Szrj wide_int mask = wi::mask (w, false,
1010138fd1498Szrj TYPE_PRECISION (type));
1010238fd1498Szrj if (((c1 | c2) & mask) == mask
1010338fd1498Szrj && wi::bit_and_not (c1, mask) == 0)
1010438fd1498Szrj {
1010538fd1498Szrj c3 = mask;
1010638fd1498Szrj break;
1010738fd1498Szrj }
1010838fd1498Szrj }
1010938fd1498Szrj
1011038fd1498Szrj if (c3 != c1)
1011138fd1498Szrj {
1011238fd1498Szrj tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1011338fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
1011438fd1498Szrj wide_int_to_tree (type, c3));
1011538fd1498Szrj return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
1011638fd1498Szrj }
1011738fd1498Szrj }
1011838fd1498Szrj
1011938fd1498Szrj /* See if this can be simplified into a rotate first. If that
1012038fd1498Szrj is unsuccessful continue in the association code. */
1012138fd1498Szrj goto bit_rotate;
1012238fd1498Szrj
1012338fd1498Szrj case BIT_XOR_EXPR:
1012438fd1498Szrj /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
1012538fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1012638fd1498Szrj && INTEGRAL_TYPE_P (type)
1012738fd1498Szrj && integer_onep (TREE_OPERAND (arg0, 1))
1012838fd1498Szrj && integer_onep (arg1))
1012938fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, arg0,
1013038fd1498Szrj build_zero_cst (TREE_TYPE (arg0)));
1013138fd1498Szrj
1013238fd1498Szrj /* See if this can be simplified into a rotate first. If that
1013338fd1498Szrj is unsuccessful continue in the association code. */
1013438fd1498Szrj goto bit_rotate;
1013538fd1498Szrj
1013638fd1498Szrj case BIT_AND_EXPR:
1013738fd1498Szrj /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
1013838fd1498Szrj if (TREE_CODE (arg0) == BIT_XOR_EXPR
1013938fd1498Szrj && INTEGRAL_TYPE_P (type)
1014038fd1498Szrj && integer_onep (TREE_OPERAND (arg0, 1))
1014138fd1498Szrj && integer_onep (arg1))
1014238fd1498Szrj {
1014338fd1498Szrj tree tem2;
1014438fd1498Szrj tem = TREE_OPERAND (arg0, 0);
1014538fd1498Szrj tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
1014638fd1498Szrj tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
1014738fd1498Szrj tem, tem2);
1014838fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, tem2,
1014938fd1498Szrj build_zero_cst (TREE_TYPE (tem)));
1015038fd1498Szrj }
1015138fd1498Szrj /* Fold ~X & 1 as (X & 1) == 0. */
1015238fd1498Szrj if (TREE_CODE (arg0) == BIT_NOT_EXPR
1015338fd1498Szrj && INTEGRAL_TYPE_P (type)
1015438fd1498Szrj && integer_onep (arg1))
1015538fd1498Szrj {
1015638fd1498Szrj tree tem2;
1015738fd1498Szrj tem = TREE_OPERAND (arg0, 0);
1015838fd1498Szrj tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
1015938fd1498Szrj tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
1016038fd1498Szrj tem, tem2);
1016138fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, tem2,
1016238fd1498Szrj build_zero_cst (TREE_TYPE (tem)));
1016338fd1498Szrj }
1016438fd1498Szrj /* Fold !X & 1 as X == 0. */
1016538fd1498Szrj if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
1016638fd1498Szrj && integer_onep (arg1))
1016738fd1498Szrj {
1016838fd1498Szrj tem = TREE_OPERAND (arg0, 0);
1016938fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, tem,
1017038fd1498Szrj build_zero_cst (TREE_TYPE (tem)));
1017138fd1498Szrj }
1017238fd1498Szrj
1017338fd1498Szrj /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
1017438fd1498Szrj multiple of 1 << CST. */
1017538fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST)
1017638fd1498Szrj {
1017738fd1498Szrj wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
1017838fd1498Szrj wide_int ncst1 = -cst1;
1017938fd1498Szrj if ((cst1 & ncst1) == ncst1
1018038fd1498Szrj && multiple_of_p (type, arg0,
1018138fd1498Szrj wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
1018238fd1498Szrj return fold_convert_loc (loc, type, arg0);
1018338fd1498Szrj }
1018438fd1498Szrj
1018538fd1498Szrj /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
1018638fd1498Szrj bits from CST2. */
1018738fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST
1018838fd1498Szrj && TREE_CODE (arg0) == MULT_EXPR
1018938fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
1019038fd1498Szrj {
1019138fd1498Szrj wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
1019238fd1498Szrj wide_int masked
1019338fd1498Szrj = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
1019438fd1498Szrj
1019538fd1498Szrj if (masked == 0)
1019638fd1498Szrj return omit_two_operands_loc (loc, type, build_zero_cst (type),
1019738fd1498Szrj arg0, arg1);
1019838fd1498Szrj else if (masked != warg1)
1019938fd1498Szrj {
1020038fd1498Szrj /* Avoid the transform if arg1 is a mask of some
1020138fd1498Szrj mode which allows further optimizations. */
1020238fd1498Szrj int pop = wi::popcount (warg1);
1020338fd1498Szrj if (!(pop >= BITS_PER_UNIT
1020438fd1498Szrj && pow2p_hwi (pop)
1020538fd1498Szrj && wi::mask (pop, false, warg1.get_precision ()) == warg1))
1020638fd1498Szrj return fold_build2_loc (loc, code, type, op0,
1020738fd1498Szrj wide_int_to_tree (type, masked));
1020838fd1498Szrj }
1020938fd1498Szrj }
1021038fd1498Szrj
1021138fd1498Szrj /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
1021238fd1498Szrj ((A & N) + B) & M -> (A + B) & M
1021338fd1498Szrj Similarly if (N & M) == 0,
1021438fd1498Szrj ((A | N) + B) & M -> (A + B) & M
1021538fd1498Szrj and for - instead of + (or unary - instead of +)
1021638fd1498Szrj and/or ^ instead of |.
1021738fd1498Szrj If B is constant and (B & M) == 0, fold into A & M. */
1021838fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST)
1021938fd1498Szrj {
1022038fd1498Szrj wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
1022138fd1498Szrj if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
1022238fd1498Szrj && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1022338fd1498Szrj && (TREE_CODE (arg0) == PLUS_EXPR
1022438fd1498Szrj || TREE_CODE (arg0) == MINUS_EXPR
1022538fd1498Szrj || TREE_CODE (arg0) == NEGATE_EXPR)
1022638fd1498Szrj && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
1022738fd1498Szrj || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
1022838fd1498Szrj {
1022938fd1498Szrj tree pmop[2];
1023038fd1498Szrj int which = 0;
1023138fd1498Szrj wide_int cst0;
1023238fd1498Szrj
1023338fd1498Szrj /* Now we know that arg0 is (C + D) or (C - D) or
1023438fd1498Szrj -C and arg1 (M) is == (1LL << cst) - 1.
1023538fd1498Szrj Store C into PMOP[0] and D into PMOP[1]. */
1023638fd1498Szrj pmop[0] = TREE_OPERAND (arg0, 0);
1023738fd1498Szrj pmop[1] = NULL;
1023838fd1498Szrj if (TREE_CODE (arg0) != NEGATE_EXPR)
1023938fd1498Szrj {
1024038fd1498Szrj pmop[1] = TREE_OPERAND (arg0, 1);
1024138fd1498Szrj which = 1;
1024238fd1498Szrj }
1024338fd1498Szrj
1024438fd1498Szrj if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
1024538fd1498Szrj which = -1;
1024638fd1498Szrj
1024738fd1498Szrj for (; which >= 0; which--)
1024838fd1498Szrj switch (TREE_CODE (pmop[which]))
1024938fd1498Szrj {
1025038fd1498Szrj case BIT_AND_EXPR:
1025138fd1498Szrj case BIT_IOR_EXPR:
1025238fd1498Szrj case BIT_XOR_EXPR:
1025338fd1498Szrj if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
1025438fd1498Szrj != INTEGER_CST)
1025538fd1498Szrj break;
1025638fd1498Szrj cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
1025738fd1498Szrj if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
1025838fd1498Szrj {
1025938fd1498Szrj if (cst0 != cst1)
1026038fd1498Szrj break;
1026138fd1498Szrj }
1026238fd1498Szrj else if (cst0 != 0)
1026338fd1498Szrj break;
1026438fd1498Szrj /* If C or D is of the form (A & N) where
1026538fd1498Szrj (N & M) == M, or of the form (A | N) or
1026638fd1498Szrj (A ^ N) where (N & M) == 0, replace it with A. */
1026738fd1498Szrj pmop[which] = TREE_OPERAND (pmop[which], 0);
1026838fd1498Szrj break;
1026938fd1498Szrj case INTEGER_CST:
1027038fd1498Szrj /* If C or D is a N where (N & M) == 0, it can be
1027138fd1498Szrj omitted (assumed 0). */
1027238fd1498Szrj if ((TREE_CODE (arg0) == PLUS_EXPR
1027338fd1498Szrj || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
1027438fd1498Szrj && (cst1 & wi::to_wide (pmop[which])) == 0)
1027538fd1498Szrj pmop[which] = NULL;
1027638fd1498Szrj break;
1027738fd1498Szrj default:
1027838fd1498Szrj break;
1027938fd1498Szrj }
1028038fd1498Szrj
1028138fd1498Szrj /* Only build anything new if we optimized one or both arguments
1028238fd1498Szrj above. */
1028338fd1498Szrj if (pmop[0] != TREE_OPERAND (arg0, 0)
1028438fd1498Szrj || (TREE_CODE (arg0) != NEGATE_EXPR
1028538fd1498Szrj && pmop[1] != TREE_OPERAND (arg0, 1)))
1028638fd1498Szrj {
1028738fd1498Szrj tree utype = TREE_TYPE (arg0);
1028838fd1498Szrj if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
1028938fd1498Szrj {
1029038fd1498Szrj /* Perform the operations in a type that has defined
1029138fd1498Szrj overflow behavior. */
1029238fd1498Szrj utype = unsigned_type_for (TREE_TYPE (arg0));
1029338fd1498Szrj if (pmop[0] != NULL)
1029438fd1498Szrj pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
1029538fd1498Szrj if (pmop[1] != NULL)
1029638fd1498Szrj pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
1029738fd1498Szrj }
1029838fd1498Szrj
1029938fd1498Szrj if (TREE_CODE (arg0) == NEGATE_EXPR)
1030038fd1498Szrj tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
1030138fd1498Szrj else if (TREE_CODE (arg0) == PLUS_EXPR)
1030238fd1498Szrj {
1030338fd1498Szrj if (pmop[0] != NULL && pmop[1] != NULL)
1030438fd1498Szrj tem = fold_build2_loc (loc, PLUS_EXPR, utype,
1030538fd1498Szrj pmop[0], pmop[1]);
1030638fd1498Szrj else if (pmop[0] != NULL)
1030738fd1498Szrj tem = pmop[0];
1030838fd1498Szrj else if (pmop[1] != NULL)
1030938fd1498Szrj tem = pmop[1];
1031038fd1498Szrj else
1031138fd1498Szrj return build_int_cst (type, 0);
1031238fd1498Szrj }
1031338fd1498Szrj else if (pmop[0] == NULL)
1031438fd1498Szrj tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
1031538fd1498Szrj else
1031638fd1498Szrj tem = fold_build2_loc (loc, MINUS_EXPR, utype,
1031738fd1498Szrj pmop[0], pmop[1]);
1031838fd1498Szrj /* TEM is now the new binary +, - or unary - replacement. */
1031938fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
1032038fd1498Szrj fold_convert_loc (loc, utype, arg1));
1032138fd1498Szrj return fold_convert_loc (loc, type, tem);
1032238fd1498Szrj }
1032338fd1498Szrj }
1032438fd1498Szrj }
1032538fd1498Szrj
1032638fd1498Szrj /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
1032738fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
1032838fd1498Szrj && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
1032938fd1498Szrj {
1033038fd1498Szrj prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
1033138fd1498Szrj
1033238fd1498Szrj wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
1033338fd1498Szrj if (mask == -1)
1033438fd1498Szrj return
1033538fd1498Szrj fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1033638fd1498Szrj }
1033738fd1498Szrj
1033838fd1498Szrj goto associate;
1033938fd1498Szrj
1034038fd1498Szrj case RDIV_EXPR:
1034138fd1498Szrj /* Don't touch a floating-point divide by zero unless the mode
1034238fd1498Szrj of the constant can represent infinity. */
1034338fd1498Szrj if (TREE_CODE (arg1) == REAL_CST
1034438fd1498Szrj && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
1034538fd1498Szrj && real_zerop (arg1))
1034638fd1498Szrj return NULL_TREE;
1034738fd1498Szrj
1034838fd1498Szrj /* (-A) / (-B) -> A / B */
1034938fd1498Szrj if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
1035038fd1498Szrj return fold_build2_loc (loc, RDIV_EXPR, type,
1035138fd1498Szrj TREE_OPERAND (arg0, 0),
1035238fd1498Szrj negate_expr (arg1));
1035338fd1498Szrj if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
1035438fd1498Szrj return fold_build2_loc (loc, RDIV_EXPR, type,
1035538fd1498Szrj negate_expr (arg0),
1035638fd1498Szrj TREE_OPERAND (arg1, 0));
1035738fd1498Szrj return NULL_TREE;
1035838fd1498Szrj
1035938fd1498Szrj case TRUNC_DIV_EXPR:
1036038fd1498Szrj /* Fall through */
1036138fd1498Szrj
1036238fd1498Szrj case FLOOR_DIV_EXPR:
1036338fd1498Szrj /* Simplify A / (B << N) where A and B are positive and B is
1036438fd1498Szrj a power of 2, to A >> (N + log2(B)). */
1036538fd1498Szrj strict_overflow_p = false;
1036638fd1498Szrj if (TREE_CODE (arg1) == LSHIFT_EXPR
1036738fd1498Szrj && (TYPE_UNSIGNED (type)
1036838fd1498Szrj || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
1036938fd1498Szrj {
1037038fd1498Szrj tree sval = TREE_OPERAND (arg1, 0);
1037138fd1498Szrj if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
1037238fd1498Szrj {
1037338fd1498Szrj tree sh_cnt = TREE_OPERAND (arg1, 1);
1037438fd1498Szrj tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
1037538fd1498Szrj wi::exact_log2 (wi::to_wide (sval)));
1037638fd1498Szrj
1037738fd1498Szrj if (strict_overflow_p)
1037838fd1498Szrj fold_overflow_warning (("assuming signed overflow does not "
1037938fd1498Szrj "occur when simplifying A / (B << N)"),
1038038fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1038138fd1498Szrj
1038238fd1498Szrj sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
1038338fd1498Szrj sh_cnt, pow2);
1038438fd1498Szrj return fold_build2_loc (loc, RSHIFT_EXPR, type,
1038538fd1498Szrj fold_convert_loc (loc, type, arg0), sh_cnt);
1038638fd1498Szrj }
1038738fd1498Szrj }
1038838fd1498Szrj
1038938fd1498Szrj /* Fall through */
1039038fd1498Szrj
1039138fd1498Szrj case ROUND_DIV_EXPR:
1039238fd1498Szrj case CEIL_DIV_EXPR:
1039338fd1498Szrj case EXACT_DIV_EXPR:
1039438fd1498Szrj if (integer_zerop (arg1))
1039538fd1498Szrj return NULL_TREE;
1039638fd1498Szrj
1039738fd1498Szrj /* Convert -A / -B to A / B when the type is signed and overflow is
1039838fd1498Szrj undefined. */
1039938fd1498Szrj if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1040038fd1498Szrj && TREE_CODE (op0) == NEGATE_EXPR
1040138fd1498Szrj && negate_expr_p (op1))
1040238fd1498Szrj {
1040338fd1498Szrj if (INTEGRAL_TYPE_P (type))
1040438fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1040538fd1498Szrj "when distributing negation across "
1040638fd1498Szrj "division"),
1040738fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1040838fd1498Szrj return fold_build2_loc (loc, code, type,
1040938fd1498Szrj fold_convert_loc (loc, type,
1041038fd1498Szrj TREE_OPERAND (arg0, 0)),
1041138fd1498Szrj negate_expr (op1));
1041238fd1498Szrj }
1041338fd1498Szrj if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1041438fd1498Szrj && TREE_CODE (arg1) == NEGATE_EXPR
1041538fd1498Szrj && negate_expr_p (op0))
1041638fd1498Szrj {
1041738fd1498Szrj if (INTEGRAL_TYPE_P (type))
1041838fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1041938fd1498Szrj "when distributing negation across "
1042038fd1498Szrj "division"),
1042138fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1042238fd1498Szrj return fold_build2_loc (loc, code, type,
1042338fd1498Szrj negate_expr (op0),
1042438fd1498Szrj fold_convert_loc (loc, type,
1042538fd1498Szrj TREE_OPERAND (arg1, 0)));
1042638fd1498Szrj }
1042738fd1498Szrj
1042838fd1498Szrj /* If arg0 is a multiple of arg1, then rewrite to the fastest div
1042938fd1498Szrj operation, EXACT_DIV_EXPR.
1043038fd1498Szrj
1043138fd1498Szrj Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
1043238fd1498Szrj At one time others generated faster code, it's not clear if they do
1043338fd1498Szrj after the last round to changes to the DIV code in expmed.c. */
1043438fd1498Szrj if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
1043538fd1498Szrj && multiple_of_p (type, arg0, arg1))
1043638fd1498Szrj return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
1043738fd1498Szrj fold_convert (type, arg0),
1043838fd1498Szrj fold_convert (type, arg1));
1043938fd1498Szrj
1044038fd1498Szrj strict_overflow_p = false;
1044138fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST
1044238fd1498Szrj && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
1044338fd1498Szrj &strict_overflow_p)) != 0)
1044438fd1498Szrj {
1044538fd1498Szrj if (strict_overflow_p)
1044638fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1044738fd1498Szrj "when simplifying division"),
1044838fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1044938fd1498Szrj return fold_convert_loc (loc, type, tem);
1045038fd1498Szrj }
1045138fd1498Szrj
1045238fd1498Szrj return NULL_TREE;
1045338fd1498Szrj
1045438fd1498Szrj case CEIL_MOD_EXPR:
1045538fd1498Szrj case FLOOR_MOD_EXPR:
1045638fd1498Szrj case ROUND_MOD_EXPR:
1045738fd1498Szrj case TRUNC_MOD_EXPR:
1045838fd1498Szrj strict_overflow_p = false;
1045938fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST
1046038fd1498Szrj && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
1046138fd1498Szrj &strict_overflow_p)) != 0)
1046238fd1498Szrj {
1046338fd1498Szrj if (strict_overflow_p)
1046438fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1046538fd1498Szrj "when simplifying modulus"),
1046638fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1046738fd1498Szrj return fold_convert_loc (loc, type, tem);
1046838fd1498Szrj }
1046938fd1498Szrj
1047038fd1498Szrj return NULL_TREE;
1047138fd1498Szrj
1047238fd1498Szrj case LROTATE_EXPR:
1047338fd1498Szrj case RROTATE_EXPR:
1047438fd1498Szrj case RSHIFT_EXPR:
1047538fd1498Szrj case LSHIFT_EXPR:
1047638fd1498Szrj /* Since negative shift count is not well-defined,
1047738fd1498Szrj don't try to compute it in the compiler. */
1047838fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
1047938fd1498Szrj return NULL_TREE;
1048038fd1498Szrj
1048138fd1498Szrj prec = element_precision (type);
1048238fd1498Szrj
1048338fd1498Szrj /* If we have a rotate of a bit operation with the rotate count and
1048438fd1498Szrj the second operand of the bit operation both constant,
1048538fd1498Szrj permute the two operations. */
1048638fd1498Szrj if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
1048738fd1498Szrj && (TREE_CODE (arg0) == BIT_AND_EXPR
1048838fd1498Szrj || TREE_CODE (arg0) == BIT_IOR_EXPR
1048938fd1498Szrj || TREE_CODE (arg0) == BIT_XOR_EXPR)
1049038fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
1049138fd1498Szrj {
1049238fd1498Szrj tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1049338fd1498Szrj tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
1049438fd1498Szrj return fold_build2_loc (loc, TREE_CODE (arg0), type,
1049538fd1498Szrj fold_build2_loc (loc, code, type,
1049638fd1498Szrj arg00, arg1),
1049738fd1498Szrj fold_build2_loc (loc, code, type,
1049838fd1498Szrj arg01, arg1));
1049938fd1498Szrj }
1050038fd1498Szrj
1050138fd1498Szrj       /* Two consecutive rotates adding up to some integer
1050238fd1498Szrj 	 multiple of the precision of the type can be ignored.  */
1050338fd1498Szrj if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
1050438fd1498Szrj && TREE_CODE (arg0) == RROTATE_EXPR
1050538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
1050638fd1498Szrj && wi::umod_trunc (wi::to_wide (arg1)
1050738fd1498Szrj + wi::to_wide (TREE_OPERAND (arg0, 1)),
1050838fd1498Szrj prec) == 0)
1050938fd1498Szrj return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1051038fd1498Szrj
1051138fd1498Szrj return NULL_TREE;
1051238fd1498Szrj
1051338fd1498Szrj case MIN_EXPR:
1051438fd1498Szrj case MAX_EXPR:
1051538fd1498Szrj goto associate;
1051638fd1498Szrj
1051738fd1498Szrj case TRUTH_ANDIF_EXPR:
1051838fd1498Szrj /* Note that the operands of this must be ints
1051938fd1498Szrj and their values must be 0 or 1.
1052038fd1498Szrj ("true" is a fixed value perhaps depending on the language.) */
1052138fd1498Szrj /* If first arg is constant zero, return it. */
1052238fd1498Szrj if (integer_zerop (arg0))
1052338fd1498Szrj return fold_convert_loc (loc, type, arg0);
1052438fd1498Szrj /* FALLTHRU */
1052538fd1498Szrj case TRUTH_AND_EXPR:
1052638fd1498Szrj /* If either arg is constant true, drop it. */
1052738fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
1052838fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
1052938fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
1053038fd1498Szrj /* Preserve sequence points. */
1053138fd1498Szrj && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
1053238fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
1053338fd1498Szrj /* If second arg is constant zero, result is zero, but first arg
1053438fd1498Szrj must be evaluated. */
1053538fd1498Szrj if (integer_zerop (arg1))
1053638fd1498Szrj return omit_one_operand_loc (loc, type, arg1, arg0);
1053738fd1498Szrj /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
1053838fd1498Szrj case will be handled here. */
1053938fd1498Szrj if (integer_zerop (arg0))
1054038fd1498Szrj return omit_one_operand_loc (loc, type, arg0, arg1);
1054138fd1498Szrj
1054238fd1498Szrj /* !X && X is always false. */
1054338fd1498Szrj if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
1054438fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
1054538fd1498Szrj return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
1054638fd1498Szrj /* X && !X is always false. */
1054738fd1498Szrj if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
1054838fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
1054938fd1498Szrj return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
1055038fd1498Szrj
1055138fd1498Szrj /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
1055238fd1498Szrj means A >= Y && A != MAX, but in this case we know that
1055338fd1498Szrj A < X <= MAX. */
1055438fd1498Szrj
1055538fd1498Szrj if (!TREE_SIDE_EFFECTS (arg0)
1055638fd1498Szrj && !TREE_SIDE_EFFECTS (arg1))
1055738fd1498Szrj {
1055838fd1498Szrj tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
1055938fd1498Szrj if (tem && !operand_equal_p (tem, arg0, 0))
1056038fd1498Szrj return fold_build2_loc (loc, code, type, tem, arg1);
1056138fd1498Szrj
1056238fd1498Szrj tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
1056338fd1498Szrj if (tem && !operand_equal_p (tem, arg1, 0))
1056438fd1498Szrj return fold_build2_loc (loc, code, type, arg0, tem);
1056538fd1498Szrj }
1056638fd1498Szrj
1056738fd1498Szrj if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
1056838fd1498Szrj != NULL_TREE)
1056938fd1498Szrj return tem;
1057038fd1498Szrj
1057138fd1498Szrj return NULL_TREE;
1057238fd1498Szrj
1057338fd1498Szrj case TRUTH_ORIF_EXPR:
1057438fd1498Szrj /* Note that the operands of this must be ints
1057538fd1498Szrj and their values must be 0 or true.
1057638fd1498Szrj ("true" is a fixed value perhaps depending on the language.) */
1057738fd1498Szrj /* If first arg is constant true, return it. */
1057838fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
1057938fd1498Szrj return fold_convert_loc (loc, type, arg0);
1058038fd1498Szrj /* FALLTHRU */
1058138fd1498Szrj case TRUTH_OR_EXPR:
1058238fd1498Szrj /* If either arg is constant zero, drop it. */
1058338fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
1058438fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
1058538fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
1058638fd1498Szrj /* Preserve sequence points. */
1058738fd1498Szrj && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
1058838fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
1058938fd1498Szrj /* If second arg is constant true, result is true, but we must
1059038fd1498Szrj evaluate first arg. */
1059138fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
1059238fd1498Szrj return omit_one_operand_loc (loc, type, arg1, arg0);
1059338fd1498Szrj /* Likewise for first arg, but note this only occurs here for
1059438fd1498Szrj TRUTH_OR_EXPR. */
1059538fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
1059638fd1498Szrj return omit_one_operand_loc (loc, type, arg0, arg1);
1059738fd1498Szrj
1059838fd1498Szrj /* !X || X is always true. */
1059938fd1498Szrj if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
1060038fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
1060138fd1498Szrj return omit_one_operand_loc (loc, type, integer_one_node, arg1);
1060238fd1498Szrj /* X || !X is always true. */
1060338fd1498Szrj if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
1060438fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
1060538fd1498Szrj return omit_one_operand_loc (loc, type, integer_one_node, arg0);
1060638fd1498Szrj
1060738fd1498Szrj /* (X && !Y) || (!X && Y) is X ^ Y */
1060838fd1498Szrj if (TREE_CODE (arg0) == TRUTH_AND_EXPR
1060938fd1498Szrj && TREE_CODE (arg1) == TRUTH_AND_EXPR)
1061038fd1498Szrj {
1061138fd1498Szrj tree a0, a1, l0, l1, n0, n1;
1061238fd1498Szrj
1061338fd1498Szrj a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
1061438fd1498Szrj a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
1061538fd1498Szrj
1061638fd1498Szrj l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
1061738fd1498Szrj l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
1061838fd1498Szrj
1061938fd1498Szrj n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
1062038fd1498Szrj n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
1062138fd1498Szrj
1062238fd1498Szrj if ((operand_equal_p (n0, a0, 0)
1062338fd1498Szrj && operand_equal_p (n1, a1, 0))
1062438fd1498Szrj || (operand_equal_p (n0, a1, 0)
1062538fd1498Szrj && operand_equal_p (n1, a0, 0)))
1062638fd1498Szrj return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
1062738fd1498Szrj }
1062838fd1498Szrj
1062938fd1498Szrj if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
1063038fd1498Szrj != NULL_TREE)
1063138fd1498Szrj return tem;
1063238fd1498Szrj
1063338fd1498Szrj return NULL_TREE;
1063438fd1498Szrj
1063538fd1498Szrj case TRUTH_XOR_EXPR:
1063638fd1498Szrj /* If the second arg is constant zero, drop it. */
1063738fd1498Szrj if (integer_zerop (arg1))
1063838fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
1063938fd1498Szrj /* If the second arg is constant true, this is a logical inversion. */
1064038fd1498Szrj if (integer_onep (arg1))
1064138fd1498Szrj {
1064238fd1498Szrj tem = invert_truthvalue_loc (loc, arg0);
1064338fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
1064438fd1498Szrj }
1064538fd1498Szrj /* Identical arguments cancel to zero. */
1064638fd1498Szrj if (operand_equal_p (arg0, arg1, 0))
1064738fd1498Szrj return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
1064838fd1498Szrj
1064938fd1498Szrj /* !X ^ X is always true. */
1065038fd1498Szrj if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
1065138fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
1065238fd1498Szrj return omit_one_operand_loc (loc, type, integer_one_node, arg1);
1065338fd1498Szrj
1065438fd1498Szrj /* X ^ !X is always true. */
1065538fd1498Szrj if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
1065638fd1498Szrj && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
1065738fd1498Szrj return omit_one_operand_loc (loc, type, integer_one_node, arg0);
1065838fd1498Szrj
1065938fd1498Szrj return NULL_TREE;
1066038fd1498Szrj
1066138fd1498Szrj case EQ_EXPR:
1066238fd1498Szrj case NE_EXPR:
1066338fd1498Szrj STRIP_NOPS (arg0);
1066438fd1498Szrj STRIP_NOPS (arg1);
1066538fd1498Szrj
1066638fd1498Szrj tem = fold_comparison (loc, code, type, op0, op1);
1066738fd1498Szrj if (tem != NULL_TREE)
1066838fd1498Szrj return tem;
1066938fd1498Szrj
1067038fd1498Szrj /* bool_var != 1 becomes !bool_var. */
1067138fd1498Szrj if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
1067238fd1498Szrj && code == NE_EXPR)
1067338fd1498Szrj return fold_convert_loc (loc, type,
1067438fd1498Szrj fold_build1_loc (loc, TRUTH_NOT_EXPR,
1067538fd1498Szrj TREE_TYPE (arg0), arg0));
1067638fd1498Szrj
1067738fd1498Szrj /* bool_var == 0 becomes !bool_var. */
1067838fd1498Szrj if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
1067938fd1498Szrj && code == EQ_EXPR)
1068038fd1498Szrj return fold_convert_loc (loc, type,
1068138fd1498Szrj fold_build1_loc (loc, TRUTH_NOT_EXPR,
1068238fd1498Szrj TREE_TYPE (arg0), arg0));
1068338fd1498Szrj
1068438fd1498Szrj /* !exp != 0 becomes !exp */
1068538fd1498Szrj if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
1068638fd1498Szrj && code == NE_EXPR)
1068738fd1498Szrj return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
1068838fd1498Szrj
1068938fd1498Szrj /* If this is an EQ or NE comparison with zero and ARG0 is
1069038fd1498Szrj (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
1069138fd1498Szrj two operations, but the latter can be done in one less insn
1069238fd1498Szrj on machines that have only two-operand insns or on which a
1069338fd1498Szrj constant cannot be the first operand. */
1069438fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1069538fd1498Szrj && integer_zerop (arg1))
1069638fd1498Szrj {
1069738fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
1069838fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1069938fd1498Szrj if (TREE_CODE (arg00) == LSHIFT_EXPR
1070038fd1498Szrj && integer_onep (TREE_OPERAND (arg00, 0)))
1070138fd1498Szrj {
1070238fd1498Szrj tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
1070338fd1498Szrj arg01, TREE_OPERAND (arg00, 1));
1070438fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
1070538fd1498Szrj build_int_cst (TREE_TYPE (arg0), 1));
1070638fd1498Szrj return fold_build2_loc (loc, code, type,
1070738fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg1), tem),
1070838fd1498Szrj arg1);
1070938fd1498Szrj }
1071038fd1498Szrj else if (TREE_CODE (arg01) == LSHIFT_EXPR
1071138fd1498Szrj && integer_onep (TREE_OPERAND (arg01, 0)))
1071238fd1498Szrj {
1071338fd1498Szrj tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
1071438fd1498Szrj arg00, TREE_OPERAND (arg01, 1));
1071538fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
1071638fd1498Szrj build_int_cst (TREE_TYPE (arg0), 1));
1071738fd1498Szrj return fold_build2_loc (loc, code, type,
1071838fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg1), tem),
1071938fd1498Szrj arg1);
1072038fd1498Szrj }
1072138fd1498Szrj }
1072238fd1498Szrj
1072338fd1498Szrj /* If this is an NE or EQ comparison of zero against the result of a
1072438fd1498Szrj signed MOD operation whose second operand is a power of 2, make
1072538fd1498Szrj the MOD operation unsigned since it is simpler and equivalent. */
1072638fd1498Szrj if (integer_zerop (arg1)
1072738fd1498Szrj && !TYPE_UNSIGNED (TREE_TYPE (arg0))
1072838fd1498Szrj && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
1072938fd1498Szrj || TREE_CODE (arg0) == CEIL_MOD_EXPR
1073038fd1498Szrj || TREE_CODE (arg0) == FLOOR_MOD_EXPR
1073138fd1498Szrj || TREE_CODE (arg0) == ROUND_MOD_EXPR)
1073238fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1)))
1073338fd1498Szrj {
1073438fd1498Szrj tree newtype = unsigned_type_for (TREE_TYPE (arg0));
1073538fd1498Szrj tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
1073638fd1498Szrj fold_convert_loc (loc, newtype,
1073738fd1498Szrj TREE_OPERAND (arg0, 0)),
1073838fd1498Szrj fold_convert_loc (loc, newtype,
1073938fd1498Szrj TREE_OPERAND (arg0, 1)));
1074038fd1498Szrj
1074138fd1498Szrj return fold_build2_loc (loc, code, type, newmod,
1074238fd1498Szrj fold_convert_loc (loc, newtype, arg1));
1074338fd1498Szrj }
1074438fd1498Szrj
1074538fd1498Szrj /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
1074638fd1498Szrj C1 is a valid shift constant, and C2 is a power of two, i.e.
1074738fd1498Szrj a single bit. */
1074838fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1074938fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
1075038fd1498Szrj && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
1075138fd1498Szrj == INTEGER_CST
1075238fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1))
1075338fd1498Szrj && integer_zerop (arg1))
1075438fd1498Szrj {
1075538fd1498Szrj tree itype = TREE_TYPE (arg0);
1075638fd1498Szrj tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
1075738fd1498Szrj prec = TYPE_PRECISION (itype);
1075838fd1498Szrj
1075938fd1498Szrj /* Check for a valid shift count. */
1076038fd1498Szrj if (wi::ltu_p (wi::to_wide (arg001), prec))
1076138fd1498Szrj {
1076238fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1076338fd1498Szrj tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
1076438fd1498Szrj unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
1076538fd1498Szrj /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
1076638fd1498Szrj can be rewritten as (X & (C2 << C1)) != 0. */
1076738fd1498Szrj if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
1076838fd1498Szrj {
1076938fd1498Szrj tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
1077038fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
1077138fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1077238fd1498Szrj fold_convert_loc (loc, itype, arg1));
1077338fd1498Szrj }
1077438fd1498Szrj /* Otherwise, for signed (arithmetic) shifts,
1077538fd1498Szrj ((X >> C1) & C2) != 0 is rewritten as X < 0, and
1077638fd1498Szrj ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
1077738fd1498Szrj else if (!TYPE_UNSIGNED (itype))
1077838fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
1077938fd1498Szrj arg000, build_int_cst (itype, 0));
1078038fd1498Szrj 	      /* Otherwise, for unsigned (logical) shifts,
1078138fd1498Szrj 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
1078238fd1498Szrj 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
1078338fd1498Szrj else
1078438fd1498Szrj return omit_one_operand_loc (loc, type,
1078538fd1498Szrj code == EQ_EXPR ? integer_one_node
1078638fd1498Szrj : integer_zero_node,
1078738fd1498Szrj arg000);
1078838fd1498Szrj }
1078938fd1498Szrj }
1079038fd1498Szrj
1079138fd1498Szrj /* If this is a comparison of a field, we may be able to simplify it. */
1079238fd1498Szrj if ((TREE_CODE (arg0) == COMPONENT_REF
1079338fd1498Szrj || TREE_CODE (arg0) == BIT_FIELD_REF)
1079438fd1498Szrj /* Handle the constant case even without -O
1079538fd1498Szrj to make sure the warnings are given. */
1079638fd1498Szrj && (optimize || TREE_CODE (arg1) == INTEGER_CST))
1079738fd1498Szrj {
1079838fd1498Szrj t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
1079938fd1498Szrj if (t1)
1080038fd1498Szrj return t1;
1080138fd1498Szrj }
1080238fd1498Szrj
1080338fd1498Szrj /* Optimize comparisons of strlen vs zero to a compare of the
1080438fd1498Szrj first character of the string vs zero. To wit,
1080538fd1498Szrj strlen(ptr) == 0 => *ptr == 0
1080638fd1498Szrj strlen(ptr) != 0 => *ptr != 0
1080738fd1498Szrj Other cases should reduce to one of these two (or a constant)
1080838fd1498Szrj due to the return value of strlen being unsigned. */
1080958e805e6Szrj if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
1081038fd1498Szrj {
1081138fd1498Szrj tree fndecl = get_callee_fndecl (arg0);
1081238fd1498Szrj
1081338fd1498Szrj if (fndecl
1081438fd1498Szrj && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1081538fd1498Szrj && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
1081638fd1498Szrj && call_expr_nargs (arg0) == 1
1081758e805e6Szrj && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
1081858e805e6Szrj == POINTER_TYPE))
1081938fd1498Szrj {
1082058e805e6Szrj tree ptrtype
1082158e805e6Szrj = build_pointer_type (build_qualified_type (char_type_node,
1082258e805e6Szrj TYPE_QUAL_CONST));
1082358e805e6Szrj tree ptr = fold_convert_loc (loc, ptrtype,
1082438fd1498Szrj CALL_EXPR_ARG (arg0, 0));
1082558e805e6Szrj tree iref = build_fold_indirect_ref_loc (loc, ptr);
1082638fd1498Szrj return fold_build2_loc (loc, code, type, iref,
1082738fd1498Szrj build_int_cst (TREE_TYPE (iref), 0));
1082838fd1498Szrj }
1082938fd1498Szrj }
1083038fd1498Szrj
1083138fd1498Szrj /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
1083238fd1498Szrj of X. Similarly fold (X >> C) == 0 into X >= 0. */
1083338fd1498Szrj if (TREE_CODE (arg0) == RSHIFT_EXPR
1083438fd1498Szrj && integer_zerop (arg1)
1083538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
1083638fd1498Szrj {
1083738fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
1083838fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1083938fd1498Szrj tree itype = TREE_TYPE (arg00);
1084038fd1498Szrj if (wi::to_wide (arg01) == element_precision (itype) - 1)
1084138fd1498Szrj {
1084238fd1498Szrj if (TYPE_UNSIGNED (itype))
1084338fd1498Szrj {
1084438fd1498Szrj itype = signed_type_for (itype);
1084538fd1498Szrj arg00 = fold_convert_loc (loc, itype, arg00);
1084638fd1498Szrj }
1084738fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
1084838fd1498Szrj type, arg00, build_zero_cst (itype));
1084938fd1498Szrj }
1085038fd1498Szrj }
1085138fd1498Szrj
1085238fd1498Szrj /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
1085338fd1498Szrj (X & C) == 0 when C is a single bit. */
1085438fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1085538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
1085638fd1498Szrj && integer_zerop (arg1)
1085738fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1)))
1085838fd1498Szrj {
1085938fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
1086038fd1498Szrj TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
1086138fd1498Szrj TREE_OPERAND (arg0, 1));
1086238fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
1086338fd1498Szrj type, tem,
1086438fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg0),
1086538fd1498Szrj arg1));
1086638fd1498Szrj }
1086738fd1498Szrj
1086838fd1498Szrj /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
1086938fd1498Szrj constant C is a power of two, i.e. a single bit. */
1087038fd1498Szrj if (TREE_CODE (arg0) == BIT_XOR_EXPR
1087138fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
1087238fd1498Szrj && integer_zerop (arg1)
1087338fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1))
1087438fd1498Szrj && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
1087538fd1498Szrj TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
1087638fd1498Szrj {
1087738fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
1087838fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
1087938fd1498Szrj arg00, build_int_cst (TREE_TYPE (arg00), 0));
1088038fd1498Szrj }
1088138fd1498Szrj
1088238fd1498Szrj       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
1088338fd1498Szrj 	 when C is a power of two, i.e. a single bit.  */
1088438fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1088538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
1088638fd1498Szrj && integer_zerop (arg1)
1088738fd1498Szrj && integer_pow2p (TREE_OPERAND (arg0, 1))
1088838fd1498Szrj && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
1088938fd1498Szrj TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
1089038fd1498Szrj {
1089138fd1498Szrj tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
1089238fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
1089338fd1498Szrj arg000, TREE_OPERAND (arg0, 1));
1089438fd1498Szrj return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
1089538fd1498Szrj tem, build_int_cst (TREE_TYPE (tem), 0));
1089638fd1498Szrj }
1089738fd1498Szrj
1089838fd1498Szrj if (integer_zerop (arg1)
1089938fd1498Szrj && tree_expr_nonzero_p (arg0))
1090038fd1498Szrj {
1090138fd1498Szrj tree res = constant_boolean_node (code==NE_EXPR, type);
1090238fd1498Szrj return omit_one_operand_loc (loc, type, res, arg0);
1090338fd1498Szrj }
1090438fd1498Szrj
1090538fd1498Szrj       /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
1090638fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1090738fd1498Szrj && TREE_CODE (arg1) == BIT_AND_EXPR)
1090838fd1498Szrj {
1090938fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
1091038fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1091138fd1498Szrj tree arg10 = TREE_OPERAND (arg1, 0);
1091238fd1498Szrj tree arg11 = TREE_OPERAND (arg1, 1);
1091338fd1498Szrj tree itype = TREE_TYPE (arg0);
1091438fd1498Szrj
1091538fd1498Szrj if (operand_equal_p (arg01, arg11, 0))
1091638fd1498Szrj {
1091738fd1498Szrj tem = fold_convert_loc (loc, itype, arg10);
1091838fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
1091938fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
1092038fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1092138fd1498Szrj build_zero_cst (itype));
1092238fd1498Szrj }
1092338fd1498Szrj if (operand_equal_p (arg01, arg10, 0))
1092438fd1498Szrj {
1092538fd1498Szrj tem = fold_convert_loc (loc, itype, arg11);
1092638fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
1092738fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
1092838fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1092938fd1498Szrj build_zero_cst (itype));
1093038fd1498Szrj }
1093138fd1498Szrj if (operand_equal_p (arg00, arg11, 0))
1093238fd1498Szrj {
1093338fd1498Szrj tem = fold_convert_loc (loc, itype, arg10);
1093438fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
1093538fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
1093638fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1093738fd1498Szrj build_zero_cst (itype));
1093838fd1498Szrj }
1093938fd1498Szrj if (operand_equal_p (arg00, arg10, 0))
1094038fd1498Szrj {
1094138fd1498Szrj tem = fold_convert_loc (loc, itype, arg11);
1094238fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
1094338fd1498Szrj tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
1094438fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1094538fd1498Szrj build_zero_cst (itype));
1094638fd1498Szrj }
1094738fd1498Szrj }
1094838fd1498Szrj
1094938fd1498Szrj if (TREE_CODE (arg0) == BIT_XOR_EXPR
1095038fd1498Szrj && TREE_CODE (arg1) == BIT_XOR_EXPR)
1095138fd1498Szrj {
1095238fd1498Szrj tree arg00 = TREE_OPERAND (arg0, 0);
1095338fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1095438fd1498Szrj tree arg10 = TREE_OPERAND (arg1, 0);
1095538fd1498Szrj tree arg11 = TREE_OPERAND (arg1, 1);
1095638fd1498Szrj tree itype = TREE_TYPE (arg0);
1095738fd1498Szrj
1095838fd1498Szrj /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
1095938fd1498Szrj operand_equal_p guarantees no side-effects so we don't need
1096038fd1498Szrj to use omit_one_operand on Z. */
1096138fd1498Szrj if (operand_equal_p (arg01, arg11, 0))
1096238fd1498Szrj return fold_build2_loc (loc, code, type, arg00,
1096338fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg00),
1096438fd1498Szrj arg10));
1096538fd1498Szrj if (operand_equal_p (arg01, arg10, 0))
1096638fd1498Szrj return fold_build2_loc (loc, code, type, arg00,
1096738fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg00),
1096838fd1498Szrj arg11));
1096938fd1498Szrj if (operand_equal_p (arg00, arg11, 0))
1097038fd1498Szrj return fold_build2_loc (loc, code, type, arg01,
1097138fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg01),
1097238fd1498Szrj arg10));
1097338fd1498Szrj if (operand_equal_p (arg00, arg10, 0))
1097438fd1498Szrj return fold_build2_loc (loc, code, type, arg01,
1097538fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg01),
1097638fd1498Szrj arg11));
1097738fd1498Szrj
1097838fd1498Szrj /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
1097938fd1498Szrj if (TREE_CODE (arg01) == INTEGER_CST
1098038fd1498Szrj && TREE_CODE (arg11) == INTEGER_CST)
1098138fd1498Szrj {
1098238fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
1098338fd1498Szrj fold_convert_loc (loc, itype, arg11));
1098438fd1498Szrj tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
1098538fd1498Szrj return fold_build2_loc (loc, code, type, tem,
1098638fd1498Szrj fold_convert_loc (loc, itype, arg10));
1098738fd1498Szrj }
1098838fd1498Szrj }
1098938fd1498Szrj
1099038fd1498Szrj /* Attempt to simplify equality/inequality comparisons of complex
1099138fd1498Szrj values. Only lower the comparison if the result is known or
1099238fd1498Szrj can be simplified to a single scalar comparison. */
1099338fd1498Szrj if ((TREE_CODE (arg0) == COMPLEX_EXPR
1099438fd1498Szrj || TREE_CODE (arg0) == COMPLEX_CST)
1099538fd1498Szrj && (TREE_CODE (arg1) == COMPLEX_EXPR
1099638fd1498Szrj || TREE_CODE (arg1) == COMPLEX_CST))
1099738fd1498Szrj {
1099838fd1498Szrj tree real0, imag0, real1, imag1;
1099938fd1498Szrj tree rcond, icond;
1100038fd1498Szrj
1100138fd1498Szrj if (TREE_CODE (arg0) == COMPLEX_EXPR)
1100238fd1498Szrj {
1100338fd1498Szrj real0 = TREE_OPERAND (arg0, 0);
1100438fd1498Szrj imag0 = TREE_OPERAND (arg0, 1);
1100538fd1498Szrj }
1100638fd1498Szrj else
1100738fd1498Szrj {
1100838fd1498Szrj real0 = TREE_REALPART (arg0);
1100938fd1498Szrj imag0 = TREE_IMAGPART (arg0);
1101038fd1498Szrj }
1101138fd1498Szrj
1101238fd1498Szrj if (TREE_CODE (arg1) == COMPLEX_EXPR)
1101338fd1498Szrj {
1101438fd1498Szrj real1 = TREE_OPERAND (arg1, 0);
1101538fd1498Szrj imag1 = TREE_OPERAND (arg1, 1);
1101638fd1498Szrj }
1101738fd1498Szrj else
1101838fd1498Szrj {
1101938fd1498Szrj real1 = TREE_REALPART (arg1);
1102038fd1498Szrj imag1 = TREE_IMAGPART (arg1);
1102138fd1498Szrj }
1102238fd1498Szrj
1102338fd1498Szrj rcond = fold_binary_loc (loc, code, type, real0, real1);
1102438fd1498Szrj if (rcond && TREE_CODE (rcond) == INTEGER_CST)
1102538fd1498Szrj {
1102638fd1498Szrj if (integer_zerop (rcond))
1102738fd1498Szrj {
1102838fd1498Szrj if (code == EQ_EXPR)
1102938fd1498Szrj return omit_two_operands_loc (loc, type, boolean_false_node,
1103038fd1498Szrj imag0, imag1);
1103138fd1498Szrj return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
1103238fd1498Szrj }
1103338fd1498Szrj else
1103438fd1498Szrj {
1103538fd1498Szrj if (code == NE_EXPR)
1103638fd1498Szrj return omit_two_operands_loc (loc, type, boolean_true_node,
1103738fd1498Szrj imag0, imag1);
1103838fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
1103938fd1498Szrj }
1104038fd1498Szrj }
1104138fd1498Szrj
1104238fd1498Szrj icond = fold_binary_loc (loc, code, type, imag0, imag1);
1104338fd1498Szrj if (icond && TREE_CODE (icond) == INTEGER_CST)
1104438fd1498Szrj {
1104538fd1498Szrj if (integer_zerop (icond))
1104638fd1498Szrj {
1104738fd1498Szrj if (code == EQ_EXPR)
1104838fd1498Szrj return omit_two_operands_loc (loc, type, boolean_false_node,
1104938fd1498Szrj real0, real1);
1105038fd1498Szrj return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
1105138fd1498Szrj }
1105238fd1498Szrj else
1105338fd1498Szrj {
1105438fd1498Szrj if (code == NE_EXPR)
1105538fd1498Szrj return omit_two_operands_loc (loc, type, boolean_true_node,
1105638fd1498Szrj real0, real1);
1105738fd1498Szrj return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
1105838fd1498Szrj }
1105938fd1498Szrj }
1106038fd1498Szrj }
1106138fd1498Szrj
1106238fd1498Szrj return NULL_TREE;
1106338fd1498Szrj
1106438fd1498Szrj case LT_EXPR:
1106538fd1498Szrj case GT_EXPR:
1106638fd1498Szrj case LE_EXPR:
1106738fd1498Szrj case GE_EXPR:
1106838fd1498Szrj tem = fold_comparison (loc, code, type, op0, op1);
1106938fd1498Szrj if (tem != NULL_TREE)
1107038fd1498Szrj return tem;
1107138fd1498Szrj
1107238fd1498Szrj /* Transform comparisons of the form X +- C CMP X. */
1107338fd1498Szrj if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
1107438fd1498Szrj && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
1107538fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
1107638fd1498Szrj && !HONOR_SNANS (arg0))
1107738fd1498Szrj {
1107838fd1498Szrj tree arg01 = TREE_OPERAND (arg0, 1);
1107938fd1498Szrj enum tree_code code0 = TREE_CODE (arg0);
1108038fd1498Szrj int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
1108138fd1498Szrj
1108238fd1498Szrj /* (X - c) > X becomes false. */
1108338fd1498Szrj if (code == GT_EXPR
1108438fd1498Szrj && ((code0 == MINUS_EXPR && is_positive >= 0)
1108538fd1498Szrj || (code0 == PLUS_EXPR && is_positive <= 0)))
1108638fd1498Szrj return constant_boolean_node (0, type);
1108738fd1498Szrj
1108838fd1498Szrj /* Likewise (X + c) < X becomes false. */
1108938fd1498Szrj if (code == LT_EXPR
1109038fd1498Szrj && ((code0 == PLUS_EXPR && is_positive >= 0)
1109138fd1498Szrj || (code0 == MINUS_EXPR && is_positive <= 0)))
1109238fd1498Szrj return constant_boolean_node (0, type);
1109338fd1498Szrj
1109438fd1498Szrj /* Convert (X - c) <= X to true. */
1109538fd1498Szrj if (!HONOR_NANS (arg1)
1109638fd1498Szrj && code == LE_EXPR
1109738fd1498Szrj && ((code0 == MINUS_EXPR && is_positive >= 0)
1109838fd1498Szrj || (code0 == PLUS_EXPR && is_positive <= 0)))
1109938fd1498Szrj return constant_boolean_node (1, type);
1110038fd1498Szrj
1110138fd1498Szrj /* Convert (X + c) >= X to true. */
1110238fd1498Szrj if (!HONOR_NANS (arg1)
1110338fd1498Szrj && code == GE_EXPR
1110438fd1498Szrj && ((code0 == PLUS_EXPR && is_positive >= 0)
1110538fd1498Szrj || (code0 == MINUS_EXPR && is_positive <= 0)))
1110638fd1498Szrj return constant_boolean_node (1, type);
1110738fd1498Szrj }
1110838fd1498Szrj
1110938fd1498Szrj /* If we are comparing an ABS_EXPR with a constant, we can
1111038fd1498Szrj convert all the cases into explicit comparisons, but they may
1111138fd1498Szrj well not be faster than doing the ABS and one comparison.
1111238fd1498Szrj But ABS (X) <= C is a range comparison, which becomes a subtraction
1111338fd1498Szrj and a comparison, and is probably faster. */
1111438fd1498Szrj if (code == LE_EXPR
1111538fd1498Szrj && TREE_CODE (arg1) == INTEGER_CST
1111638fd1498Szrj && TREE_CODE (arg0) == ABS_EXPR
1111738fd1498Szrj && ! TREE_SIDE_EFFECTS (arg0)
1111838fd1498Szrj && (tem = negate_expr (arg1)) != 0
1111938fd1498Szrj && TREE_CODE (tem) == INTEGER_CST
1112038fd1498Szrj && !TREE_OVERFLOW (tem))
1112138fd1498Szrj return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
1112238fd1498Szrj build2 (GE_EXPR, type,
1112338fd1498Szrj TREE_OPERAND (arg0, 0), tem),
1112438fd1498Szrj build2 (LE_EXPR, type,
1112538fd1498Szrj TREE_OPERAND (arg0, 0), arg1));
1112638fd1498Szrj
1112738fd1498Szrj /* Convert ABS_EXPR<x> >= 0 to true. */
1112838fd1498Szrj strict_overflow_p = false;
1112938fd1498Szrj if (code == GE_EXPR
1113038fd1498Szrj && (integer_zerop (arg1)
1113138fd1498Szrj || (! HONOR_NANS (arg0)
1113238fd1498Szrj && real_zerop (arg1)))
1113338fd1498Szrj && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
1113438fd1498Szrj {
1113538fd1498Szrj if (strict_overflow_p)
1113638fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1113738fd1498Szrj "when simplifying comparison of "
1113838fd1498Szrj "absolute value and zero"),
1113938fd1498Szrj WARN_STRICT_OVERFLOW_CONDITIONAL);
1114038fd1498Szrj return omit_one_operand_loc (loc, type,
1114138fd1498Szrj constant_boolean_node (true, type),
1114238fd1498Szrj arg0);
1114338fd1498Szrj }
1114438fd1498Szrj
1114538fd1498Szrj /* Convert ABS_EXPR<x> < 0 to false. */
1114638fd1498Szrj strict_overflow_p = false;
1114738fd1498Szrj if (code == LT_EXPR
1114838fd1498Szrj && (integer_zerop (arg1) || real_zerop (arg1))
1114938fd1498Szrj && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
1115038fd1498Szrj {
1115138fd1498Szrj if (strict_overflow_p)
1115238fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur "
1115338fd1498Szrj "when simplifying comparison of "
1115438fd1498Szrj "absolute value and zero"),
1115538fd1498Szrj WARN_STRICT_OVERFLOW_CONDITIONAL);
1115638fd1498Szrj return omit_one_operand_loc (loc, type,
1115738fd1498Szrj constant_boolean_node (false, type),
1115838fd1498Szrj arg0);
1115938fd1498Szrj }
1116038fd1498Szrj
1116138fd1498Szrj /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
1116238fd1498Szrj and similarly for >= into !=. */
1116338fd1498Szrj if ((code == LT_EXPR || code == GE_EXPR)
1116438fd1498Szrj && TYPE_UNSIGNED (TREE_TYPE (arg0))
1116538fd1498Szrj && TREE_CODE (arg1) == LSHIFT_EXPR
1116638fd1498Szrj && integer_onep (TREE_OPERAND (arg1, 0)))
1116738fd1498Szrj return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
1116838fd1498Szrj build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
1116938fd1498Szrj TREE_OPERAND (arg1, 1)),
1117038fd1498Szrj build_zero_cst (TREE_TYPE (arg0)));
1117138fd1498Szrj
1117238fd1498Szrj /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
1117338fd1498Szrj otherwise Y might be >= # of bits in X's type and thus e.g.
1117438fd1498Szrj (unsigned char) (1 << Y) for Y 15 might be 0.
1117538fd1498Szrj If the cast is widening, then 1 << Y should have unsigned type,
1117638fd1498Szrj otherwise if Y is number of bits in the signed shift type minus 1,
1117738fd1498Szrj we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
1117838fd1498Szrj 31 might be 0xffffffff80000000. */
1117938fd1498Szrj if ((code == LT_EXPR || code == GE_EXPR)
1118038fd1498Szrj && TYPE_UNSIGNED (TREE_TYPE (arg0))
1118138fd1498Szrj && CONVERT_EXPR_P (arg1)
1118238fd1498Szrj && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
1118338fd1498Szrj && (element_precision (TREE_TYPE (arg1))
1118438fd1498Szrj >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1118538fd1498Szrj && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
1118638fd1498Szrj || (element_precision (TREE_TYPE (arg1))
1118738fd1498Szrj == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
1118838fd1498Szrj && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
1118938fd1498Szrj {
1119038fd1498Szrj tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
1119138fd1498Szrj TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
1119238fd1498Szrj return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
1119338fd1498Szrj fold_convert_loc (loc, TREE_TYPE (arg0), tem),
1119438fd1498Szrj build_zero_cst (TREE_TYPE (arg0)));
1119538fd1498Szrj }
1119638fd1498Szrj
1119738fd1498Szrj return NULL_TREE;
1119838fd1498Szrj
1119938fd1498Szrj case UNORDERED_EXPR:
1120038fd1498Szrj case ORDERED_EXPR:
1120138fd1498Szrj case UNLT_EXPR:
1120238fd1498Szrj case UNLE_EXPR:
1120338fd1498Szrj case UNGT_EXPR:
1120438fd1498Szrj case UNGE_EXPR:
1120538fd1498Szrj case UNEQ_EXPR:
1120638fd1498Szrj case LTGT_EXPR:
1120738fd1498Szrj /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
1120838fd1498Szrj {
1120938fd1498Szrj tree targ0 = strip_float_extensions (arg0);
1121038fd1498Szrj tree targ1 = strip_float_extensions (arg1);
1121138fd1498Szrj tree newtype = TREE_TYPE (targ0);
1121238fd1498Szrj
1121338fd1498Szrj if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
1121438fd1498Szrj newtype = TREE_TYPE (targ1);
1121538fd1498Szrj
1121638fd1498Szrj if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
1121738fd1498Szrj return fold_build2_loc (loc, code, type,
1121838fd1498Szrj fold_convert_loc (loc, newtype, targ0),
1121938fd1498Szrj fold_convert_loc (loc, newtype, targ1));
1122038fd1498Szrj }
1122138fd1498Szrj
1122238fd1498Szrj return NULL_TREE;
1122338fd1498Szrj
1122438fd1498Szrj case COMPOUND_EXPR:
1122538fd1498Szrj /* When pedantic, a compound expression can be neither an lvalue
1122638fd1498Szrj nor an integer constant expression. */
1122738fd1498Szrj if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
1122838fd1498Szrj return NULL_TREE;
1122938fd1498Szrj /* Don't let (0, 0) be null pointer constant. */
1123038fd1498Szrj tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
1123138fd1498Szrj : fold_convert_loc (loc, type, arg1);
1123238fd1498Szrj return pedantic_non_lvalue_loc (loc, tem);
1123338fd1498Szrj
1123438fd1498Szrj case ASSERT_EXPR:
1123538fd1498Szrj /* An ASSERT_EXPR should never be passed to fold_binary. */
1123638fd1498Szrj gcc_unreachable ();
1123738fd1498Szrj
1123838fd1498Szrj default:
1123938fd1498Szrj return NULL_TREE;
1124038fd1498Szrj } /* switch (code) */
1124138fd1498Szrj }
1124238fd1498Szrj
/* Used by contains_label_[p1].  */

struct contains_label_data
{
  /* Set of nodes already visited, handed to walk_tree as its PSET so
     shared sub-trees are not scanned more than once.  */
  hash_set<tree> *pset;
  /* True while walking the body of a SWITCH_EXPR; CASE_LABEL_EXPRs seen
     there belong to that switch and are not reachable from outside.  */
  bool inside_switch_p;
};
1125038fd1498Szrj
1125138fd1498Szrj /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
1125238fd1498Szrj a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
1125338fd1498Szrj return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
1125438fd1498Szrj
1125538fd1498Szrj static tree
contains_label_1(tree * tp,int * walk_subtrees,void * data)1125638fd1498Szrj contains_label_1 (tree *tp, int *walk_subtrees, void *data)
1125738fd1498Szrj {
1125838fd1498Szrj contains_label_data *d = (contains_label_data *) data;
1125938fd1498Szrj switch (TREE_CODE (*tp))
1126038fd1498Szrj {
1126138fd1498Szrj case LABEL_EXPR:
1126238fd1498Szrj return *tp;
1126338fd1498Szrj
1126438fd1498Szrj case CASE_LABEL_EXPR:
1126538fd1498Szrj if (!d->inside_switch_p)
1126638fd1498Szrj return *tp;
1126738fd1498Szrj return NULL_TREE;
1126838fd1498Szrj
1126938fd1498Szrj case SWITCH_EXPR:
1127038fd1498Szrj if (!d->inside_switch_p)
1127138fd1498Szrj {
1127238fd1498Szrj if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
1127338fd1498Szrj return *tp;
1127438fd1498Szrj d->inside_switch_p = true;
1127538fd1498Szrj if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
1127638fd1498Szrj return *tp;
1127738fd1498Szrj d->inside_switch_p = false;
1127838fd1498Szrj *walk_subtrees = 0;
1127938fd1498Szrj }
1128038fd1498Szrj return NULL_TREE;
1128138fd1498Szrj
1128238fd1498Szrj case GOTO_EXPR:
1128338fd1498Szrj *walk_subtrees = 0;
1128438fd1498Szrj return NULL_TREE;
1128538fd1498Szrj
1128638fd1498Szrj default:
1128738fd1498Szrj return NULL_TREE;
1128838fd1498Szrj }
1128938fd1498Szrj }
1129038fd1498Szrj
1129138fd1498Szrj /* Return whether the sub-tree ST contains a label which is accessible from
1129238fd1498Szrj outside the sub-tree. */
1129338fd1498Szrj
1129438fd1498Szrj static bool
contains_label_p(tree st)1129538fd1498Szrj contains_label_p (tree st)
1129638fd1498Szrj {
1129738fd1498Szrj hash_set<tree> pset;
1129838fd1498Szrj contains_label_data data = { &pset, false };
1129938fd1498Szrj return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
1130038fd1498Szrj }
1130138fd1498Szrj
1130238fd1498Szrj /* Fold a ternary expression of code CODE and type TYPE with operands
1130338fd1498Szrj OP0, OP1, and OP2. Return the folded expression if folding is
1130438fd1498Szrj successful. Otherwise, return NULL_TREE. */
1130538fd1498Szrj
1130638fd1498Szrj tree
fold_ternary_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)1130738fd1498Szrj fold_ternary_loc (location_t loc, enum tree_code code, tree type,
1130838fd1498Szrj tree op0, tree op1, tree op2)
1130938fd1498Szrj {
1131038fd1498Szrj tree tem;
1131138fd1498Szrj tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
1131238fd1498Szrj enum tree_code_class kind = TREE_CODE_CLASS (code);
1131338fd1498Szrj
1131438fd1498Szrj gcc_assert (IS_EXPR_CODE_CLASS (kind)
1131538fd1498Szrj && TREE_CODE_LENGTH (code) == 3);
1131638fd1498Szrj
1131738fd1498Szrj /* If this is a commutative operation, and OP0 is a constant, move it
1131838fd1498Szrj to OP1 to reduce the number of tests below. */
1131938fd1498Szrj if (commutative_ternary_tree_code (code)
1132038fd1498Szrj && tree_swap_operands_p (op0, op1))
1132138fd1498Szrj return fold_build3_loc (loc, code, type, op1, op0, op2);
1132238fd1498Szrj
1132338fd1498Szrj tem = generic_simplify (loc, code, type, op0, op1, op2);
1132438fd1498Szrj if (tem)
1132538fd1498Szrj return tem;
1132638fd1498Szrj
1132738fd1498Szrj /* Strip any conversions that don't change the mode. This is safe
1132838fd1498Szrj for every expression, except for a comparison expression because
1132938fd1498Szrj its signedness is derived from its operands. So, in the latter
1133038fd1498Szrj case, only strip conversions that don't change the signedness.
1133138fd1498Szrj
1133238fd1498Szrj Note that this is done as an internal manipulation within the
1133338fd1498Szrj constant folder, in order to find the simplest representation of
1133438fd1498Szrj the arguments so that their form can be studied. In any cases,
1133538fd1498Szrj the appropriate type conversions should be put back in the tree
1133638fd1498Szrj that will get out of the constant folder. */
1133738fd1498Szrj if (op0)
1133838fd1498Szrj {
1133938fd1498Szrj arg0 = op0;
1134038fd1498Szrj STRIP_NOPS (arg0);
1134138fd1498Szrj }
1134238fd1498Szrj
1134338fd1498Szrj if (op1)
1134438fd1498Szrj {
1134538fd1498Szrj arg1 = op1;
1134638fd1498Szrj STRIP_NOPS (arg1);
1134738fd1498Szrj }
1134838fd1498Szrj
1134938fd1498Szrj if (op2)
1135038fd1498Szrj {
1135138fd1498Szrj arg2 = op2;
1135238fd1498Szrj STRIP_NOPS (arg2);
1135338fd1498Szrj }
1135438fd1498Szrj
1135538fd1498Szrj switch (code)
1135638fd1498Szrj {
1135738fd1498Szrj case COMPONENT_REF:
1135838fd1498Szrj if (TREE_CODE (arg0) == CONSTRUCTOR
1135938fd1498Szrj && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
1136038fd1498Szrj {
1136138fd1498Szrj unsigned HOST_WIDE_INT idx;
1136238fd1498Szrj tree field, value;
1136338fd1498Szrj FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
1136438fd1498Szrj if (field == arg1)
1136538fd1498Szrj return value;
1136638fd1498Szrj }
1136738fd1498Szrj return NULL_TREE;
1136838fd1498Szrj
1136938fd1498Szrj case COND_EXPR:
1137038fd1498Szrj case VEC_COND_EXPR:
1137138fd1498Szrj /* Pedantic ANSI C says that a conditional expression is never an lvalue,
1137238fd1498Szrj so all simple results must be passed through pedantic_non_lvalue. */
1137338fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST)
1137438fd1498Szrj {
1137538fd1498Szrj tree unused_op = integer_zerop (arg0) ? op1 : op2;
1137638fd1498Szrj tem = integer_zerop (arg0) ? op2 : op1;
1137738fd1498Szrj /* Only optimize constant conditions when the selected branch
1137838fd1498Szrj has the same type as the COND_EXPR. This avoids optimizing
1137938fd1498Szrj away "c ? x : throw", where the throw has a void type.
1138038fd1498Szrj Avoid throwing away that operand which contains label. */
1138138fd1498Szrj if ((!TREE_SIDE_EFFECTS (unused_op)
1138238fd1498Szrj || !contains_label_p (unused_op))
1138338fd1498Szrj && (! VOID_TYPE_P (TREE_TYPE (tem))
1138438fd1498Szrj || VOID_TYPE_P (type)))
1138538fd1498Szrj return pedantic_non_lvalue_loc (loc, tem);
1138638fd1498Szrj return NULL_TREE;
1138738fd1498Szrj }
1138838fd1498Szrj else if (TREE_CODE (arg0) == VECTOR_CST)
1138938fd1498Szrj {
1139038fd1498Szrj unsigned HOST_WIDE_INT nelts;
1139138fd1498Szrj if ((TREE_CODE (arg1) == VECTOR_CST
1139238fd1498Szrj || TREE_CODE (arg1) == CONSTRUCTOR)
1139338fd1498Szrj && (TREE_CODE (arg2) == VECTOR_CST
1139438fd1498Szrj || TREE_CODE (arg2) == CONSTRUCTOR)
1139538fd1498Szrj && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
1139638fd1498Szrj {
1139738fd1498Szrj vec_perm_builder sel (nelts, nelts, 1);
1139838fd1498Szrj for (unsigned int i = 0; i < nelts; i++)
1139938fd1498Szrj {
1140038fd1498Szrj tree val = VECTOR_CST_ELT (arg0, i);
1140138fd1498Szrj if (integer_all_onesp (val))
1140238fd1498Szrj sel.quick_push (i);
1140338fd1498Szrj else if (integer_zerop (val))
1140438fd1498Szrj sel.quick_push (nelts + i);
1140538fd1498Szrj else /* Currently unreachable. */
1140638fd1498Szrj return NULL_TREE;
1140738fd1498Szrj }
1140838fd1498Szrj vec_perm_indices indices (sel, 2, nelts);
1140938fd1498Szrj tree t = fold_vec_perm (type, arg1, arg2, indices);
1141038fd1498Szrj if (t != NULL_TREE)
1141138fd1498Szrj return t;
1141238fd1498Szrj }
1141338fd1498Szrj }
1141438fd1498Szrj
1141538fd1498Szrj /* If we have A op B ? A : C, we may be able to convert this to a
1141638fd1498Szrj simpler expression, depending on the operation and the values
1141738fd1498Szrj of B and C. Signed zeros prevent all of these transformations,
1141838fd1498Szrj for reasons given above each one.
1141938fd1498Szrj
1142038fd1498Szrj Also try swapping the arguments and inverting the conditional. */
1142138fd1498Szrj if (COMPARISON_CLASS_P (arg0)
1142238fd1498Szrj && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
1142338fd1498Szrj && !HONOR_SIGNED_ZEROS (element_mode (op1)))
1142438fd1498Szrj {
1142538fd1498Szrj tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
1142638fd1498Szrj if (tem)
1142738fd1498Szrj return tem;
1142838fd1498Szrj }
1142938fd1498Szrj
1143038fd1498Szrj if (COMPARISON_CLASS_P (arg0)
1143138fd1498Szrj && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
1143238fd1498Szrj && !HONOR_SIGNED_ZEROS (element_mode (op2)))
1143338fd1498Szrj {
1143438fd1498Szrj location_t loc0 = expr_location_or (arg0, loc);
1143538fd1498Szrj tem = fold_invert_truthvalue (loc0, arg0);
1143638fd1498Szrj if (tem && COMPARISON_CLASS_P (tem))
1143738fd1498Szrj {
1143838fd1498Szrj tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
1143938fd1498Szrj if (tem)
1144038fd1498Szrj return tem;
1144138fd1498Szrj }
1144238fd1498Szrj }
1144338fd1498Szrj
1144438fd1498Szrj /* If the second operand is simpler than the third, swap them
1144538fd1498Szrj since that produces better jump optimization results. */
1144638fd1498Szrj if (truth_value_p (TREE_CODE (arg0))
1144738fd1498Szrj && tree_swap_operands_p (op1, op2))
1144838fd1498Szrj {
1144938fd1498Szrj location_t loc0 = expr_location_or (arg0, loc);
1145038fd1498Szrj /* See if this can be inverted. If it can't, possibly because
1145138fd1498Szrj it was a floating-point inequality comparison, don't do
1145238fd1498Szrj anything. */
1145338fd1498Szrj tem = fold_invert_truthvalue (loc0, arg0);
1145438fd1498Szrj if (tem)
1145538fd1498Szrj return fold_build3_loc (loc, code, type, tem, op2, op1);
1145638fd1498Szrj }
1145738fd1498Szrj
1145838fd1498Szrj /* Convert A ? 1 : 0 to simply A. */
1145938fd1498Szrj if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
1146038fd1498Szrj : (integer_onep (op1)
1146138fd1498Szrj && !VECTOR_TYPE_P (type)))
1146238fd1498Szrj && integer_zerop (op2)
1146338fd1498Szrj /* If we try to convert OP0 to our type, the
1146438fd1498Szrj call to fold will try to move the conversion inside
1146538fd1498Szrj a COND, which will recurse. In that case, the COND_EXPR
1146638fd1498Szrj is probably the best choice, so leave it alone. */
1146738fd1498Szrj && type == TREE_TYPE (arg0))
1146838fd1498Szrj return pedantic_non_lvalue_loc (loc, arg0);
1146938fd1498Szrj
1147038fd1498Szrj /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
1147138fd1498Szrj over COND_EXPR in cases such as floating point comparisons. */
1147238fd1498Szrj if (integer_zerop (op1)
1147338fd1498Szrj && code == COND_EXPR
1147438fd1498Szrj && integer_onep (op2)
1147538fd1498Szrj && !VECTOR_TYPE_P (type)
1147638fd1498Szrj && truth_value_p (TREE_CODE (arg0)))
1147738fd1498Szrj return pedantic_non_lvalue_loc (loc,
1147838fd1498Szrj fold_convert_loc (loc, type,
1147938fd1498Szrj invert_truthvalue_loc (loc,
1148038fd1498Szrj arg0)));
1148138fd1498Szrj
1148238fd1498Szrj /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
1148338fd1498Szrj if (TREE_CODE (arg0) == LT_EXPR
1148438fd1498Szrj && integer_zerop (TREE_OPERAND (arg0, 1))
1148538fd1498Szrj && integer_zerop (op2)
1148638fd1498Szrj && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
1148738fd1498Szrj {
1148838fd1498Szrj /* sign_bit_p looks through both zero and sign extensions,
1148938fd1498Szrj but for this optimization only sign extensions are
1149038fd1498Szrj usable. */
1149138fd1498Szrj tree tem2 = TREE_OPERAND (arg0, 0);
1149238fd1498Szrj while (tem != tem2)
1149338fd1498Szrj {
1149438fd1498Szrj if (TREE_CODE (tem2) != NOP_EXPR
1149538fd1498Szrj || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
1149638fd1498Szrj {
1149738fd1498Szrj tem = NULL_TREE;
1149838fd1498Szrj break;
1149938fd1498Szrj }
1150038fd1498Szrj tem2 = TREE_OPERAND (tem2, 0);
1150138fd1498Szrj }
1150238fd1498Szrj /* sign_bit_p only checks ARG1 bits within A's precision.
1150338fd1498Szrj If <sign bit of A> has wider type than A, bits outside
1150438fd1498Szrj of A's precision in <sign bit of A> need to be checked.
1150538fd1498Szrj If they are all 0, this optimization needs to be done
1150638fd1498Szrj in unsigned A's type, if they are all 1 in signed A's type,
1150738fd1498Szrj otherwise this can't be done. */
1150838fd1498Szrj if (tem
1150938fd1498Szrj && TYPE_PRECISION (TREE_TYPE (tem))
1151038fd1498Szrj < TYPE_PRECISION (TREE_TYPE (arg1))
1151138fd1498Szrj && TYPE_PRECISION (TREE_TYPE (tem))
1151238fd1498Szrj < TYPE_PRECISION (type))
1151338fd1498Szrj {
1151438fd1498Szrj int inner_width, outer_width;
1151538fd1498Szrj tree tem_type;
1151638fd1498Szrj
1151738fd1498Szrj inner_width = TYPE_PRECISION (TREE_TYPE (tem));
1151838fd1498Szrj outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
1151938fd1498Szrj if (outer_width > TYPE_PRECISION (type))
1152038fd1498Szrj outer_width = TYPE_PRECISION (type);
1152138fd1498Szrj
1152238fd1498Szrj wide_int mask = wi::shifted_mask
1152338fd1498Szrj (inner_width, outer_width - inner_width, false,
1152438fd1498Szrj TYPE_PRECISION (TREE_TYPE (arg1)));
1152538fd1498Szrj
1152638fd1498Szrj wide_int common = mask & wi::to_wide (arg1);
1152738fd1498Szrj if (common == mask)
1152838fd1498Szrj {
1152938fd1498Szrj tem_type = signed_type_for (TREE_TYPE (tem));
1153038fd1498Szrj tem = fold_convert_loc (loc, tem_type, tem);
1153138fd1498Szrj }
1153238fd1498Szrj else if (common == 0)
1153338fd1498Szrj {
1153438fd1498Szrj tem_type = unsigned_type_for (TREE_TYPE (tem));
1153538fd1498Szrj tem = fold_convert_loc (loc, tem_type, tem);
1153638fd1498Szrj }
1153738fd1498Szrj else
1153838fd1498Szrj tem = NULL;
1153938fd1498Szrj }
1154038fd1498Szrj
1154138fd1498Szrj if (tem)
1154238fd1498Szrj return
1154338fd1498Szrj fold_convert_loc (loc, type,
1154438fd1498Szrj fold_build2_loc (loc, BIT_AND_EXPR,
1154538fd1498Szrj TREE_TYPE (tem), tem,
1154638fd1498Szrj fold_convert_loc (loc,
1154738fd1498Szrj TREE_TYPE (tem),
1154838fd1498Szrj arg1)));
1154938fd1498Szrj }
1155038fd1498Szrj
1155138fd1498Szrj /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
1155238fd1498Szrj already handled above. */
1155338fd1498Szrj if (TREE_CODE (arg0) == BIT_AND_EXPR
1155438fd1498Szrj && integer_onep (TREE_OPERAND (arg0, 1))
1155538fd1498Szrj && integer_zerop (op2)
1155638fd1498Szrj && integer_pow2p (arg1))
1155738fd1498Szrj {
1155838fd1498Szrj tree tem = TREE_OPERAND (arg0, 0);
1155938fd1498Szrj STRIP_NOPS (tem);
1156038fd1498Szrj if (TREE_CODE (tem) == RSHIFT_EXPR
1156138fd1498Szrj && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
1156238fd1498Szrj && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
1156338fd1498Szrj == tree_to_uhwi (TREE_OPERAND (tem, 1)))
1156438fd1498Szrj return fold_build2_loc (loc, BIT_AND_EXPR, type,
1156538fd1498Szrj fold_convert_loc (loc, type,
1156638fd1498Szrj TREE_OPERAND (tem, 0)),
1156738fd1498Szrj op1);
1156838fd1498Szrj }
1156938fd1498Szrj
1157038fd1498Szrj /* A & N ? N : 0 is simply A & N if N is a power of two. This
1157138fd1498Szrj is probably obsolete because the first operand should be a
1157238fd1498Szrj truth value (that's why we have the two cases above), but let's
1157338fd1498Szrj leave it in until we can confirm this for all front-ends. */
1157438fd1498Szrj if (integer_zerop (op2)
1157538fd1498Szrj && TREE_CODE (arg0) == NE_EXPR
1157638fd1498Szrj && integer_zerop (TREE_OPERAND (arg0, 1))
1157738fd1498Szrj && integer_pow2p (arg1)
1157838fd1498Szrj && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
1157938fd1498Szrj && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
1158058e805e6Szrj arg1, OEP_ONLY_CONST)
1158158e805e6Szrj /* operand_equal_p compares just value, not precision, so e.g.
1158258e805e6Szrj arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
1158358e805e6Szrj second operand 32-bit -128, which is not a power of two (or vice
1158458e805e6Szrj versa. */
1158558e805e6Szrj && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
1158638fd1498Szrj return pedantic_non_lvalue_loc (loc,
1158738fd1498Szrj fold_convert_loc (loc, type,
1158858e805e6Szrj TREE_OPERAND (arg0,
1158958e805e6Szrj 0)));
1159038fd1498Szrj
1159138fd1498Szrj /* Disable the transformations below for vectors, since
1159238fd1498Szrj fold_binary_op_with_conditional_arg may undo them immediately,
1159338fd1498Szrj yielding an infinite loop. */
1159438fd1498Szrj if (code == VEC_COND_EXPR)
1159538fd1498Szrj return NULL_TREE;
1159638fd1498Szrj
1159738fd1498Szrj /* Convert A ? B : 0 into A && B if A and B are truth values. */
1159838fd1498Szrj if (integer_zerop (op2)
1159938fd1498Szrj && truth_value_p (TREE_CODE (arg0))
1160038fd1498Szrj && truth_value_p (TREE_CODE (arg1))
1160138fd1498Szrj && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
1160238fd1498Szrj return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
1160338fd1498Szrj : TRUTH_ANDIF_EXPR,
1160438fd1498Szrj type, fold_convert_loc (loc, type, arg0), op1);
1160538fd1498Szrj
1160638fd1498Szrj /* Convert A ? B : 1 into !A || B if A and B are truth values. */
1160738fd1498Szrj if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
1160838fd1498Szrj && truth_value_p (TREE_CODE (arg0))
1160938fd1498Szrj && truth_value_p (TREE_CODE (arg1))
1161038fd1498Szrj && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
1161138fd1498Szrj {
1161238fd1498Szrj location_t loc0 = expr_location_or (arg0, loc);
1161338fd1498Szrj /* Only perform transformation if ARG0 is easily inverted. */
1161438fd1498Szrj tem = fold_invert_truthvalue (loc0, arg0);
1161538fd1498Szrj if (tem)
1161638fd1498Szrj return fold_build2_loc (loc, code == VEC_COND_EXPR
1161738fd1498Szrj ? BIT_IOR_EXPR
1161838fd1498Szrj : TRUTH_ORIF_EXPR,
1161938fd1498Szrj type, fold_convert_loc (loc, type, tem),
1162038fd1498Szrj op1);
1162138fd1498Szrj }
1162238fd1498Szrj
1162338fd1498Szrj /* Convert A ? 0 : B into !A && B if A and B are truth values. */
1162438fd1498Szrj if (integer_zerop (arg1)
1162538fd1498Szrj && truth_value_p (TREE_CODE (arg0))
1162638fd1498Szrj && truth_value_p (TREE_CODE (op2))
1162738fd1498Szrj && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
1162838fd1498Szrj {
1162938fd1498Szrj location_t loc0 = expr_location_or (arg0, loc);
1163038fd1498Szrj /* Only perform transformation if ARG0 is easily inverted. */
1163138fd1498Szrj tem = fold_invert_truthvalue (loc0, arg0);
1163238fd1498Szrj if (tem)
1163338fd1498Szrj return fold_build2_loc (loc, code == VEC_COND_EXPR
1163438fd1498Szrj ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
1163538fd1498Szrj type, fold_convert_loc (loc, type, tem),
1163638fd1498Szrj op2);
1163738fd1498Szrj }
1163838fd1498Szrj
1163938fd1498Szrj /* Convert A ? 1 : B into A || B if A and B are truth values. */
1164038fd1498Szrj if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
1164138fd1498Szrj && truth_value_p (TREE_CODE (arg0))
1164238fd1498Szrj && truth_value_p (TREE_CODE (op2))
1164338fd1498Szrj && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
1164438fd1498Szrj return fold_build2_loc (loc, code == VEC_COND_EXPR
1164538fd1498Szrj ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
1164638fd1498Szrj type, fold_convert_loc (loc, type, arg0), op2);
1164738fd1498Szrj
1164838fd1498Szrj return NULL_TREE;
1164938fd1498Szrj
1165038fd1498Szrj case CALL_EXPR:
1165138fd1498Szrj /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
1165238fd1498Szrj of fold_ternary on them. */
1165338fd1498Szrj gcc_unreachable ();
1165438fd1498Szrj
1165538fd1498Szrj case BIT_FIELD_REF:
1165638fd1498Szrj if (TREE_CODE (arg0) == VECTOR_CST
1165738fd1498Szrj && (type == TREE_TYPE (TREE_TYPE (arg0))
1165838fd1498Szrj || (VECTOR_TYPE_P (type)
1165938fd1498Szrj && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
1166038fd1498Szrj && tree_fits_uhwi_p (op1)
1166138fd1498Szrj && tree_fits_uhwi_p (op2))
1166238fd1498Szrj {
1166338fd1498Szrj tree eltype = TREE_TYPE (TREE_TYPE (arg0));
1166438fd1498Szrj unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
1166538fd1498Szrj unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
1166638fd1498Szrj unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
1166738fd1498Szrj
1166838fd1498Szrj if (n != 0
1166938fd1498Szrj && (idx % width) == 0
1167038fd1498Szrj && (n % width) == 0
1167138fd1498Szrj && known_le ((idx + n) / width,
1167238fd1498Szrj TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
1167338fd1498Szrj {
1167438fd1498Szrj idx = idx / width;
1167538fd1498Szrj n = n / width;
1167638fd1498Szrj
1167738fd1498Szrj if (TREE_CODE (arg0) == VECTOR_CST)
1167838fd1498Szrj {
1167938fd1498Szrj if (n == 1)
1168038fd1498Szrj {
1168138fd1498Szrj tem = VECTOR_CST_ELT (arg0, idx);
1168238fd1498Szrj if (VECTOR_TYPE_P (type))
1168338fd1498Szrj tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
1168438fd1498Szrj return tem;
1168538fd1498Szrj }
1168638fd1498Szrj
1168738fd1498Szrj tree_vector_builder vals (type, n, 1);
1168838fd1498Szrj for (unsigned i = 0; i < n; ++i)
1168938fd1498Szrj vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
1169038fd1498Szrj return vals.build ();
1169138fd1498Szrj }
1169238fd1498Szrj }
1169338fd1498Szrj }
1169438fd1498Szrj
1169538fd1498Szrj /* On constants we can use native encode/interpret to constant
1169638fd1498Szrj fold (nearly) all BIT_FIELD_REFs. */
1169738fd1498Szrj if (CONSTANT_CLASS_P (arg0)
1169838fd1498Szrj && can_native_interpret_type_p (type)
1169938fd1498Szrj && BITS_PER_UNIT == 8
1170038fd1498Szrj && tree_fits_uhwi_p (op1)
1170138fd1498Szrj && tree_fits_uhwi_p (op2))
1170238fd1498Szrj {
1170338fd1498Szrj unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
1170438fd1498Szrj unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
1170538fd1498Szrj /* Limit us to a reasonable amount of work. To relax the
1170638fd1498Szrj other limitations we need bit-shifting of the buffer
1170738fd1498Szrj and rounding up the size. */
1170838fd1498Szrj if (bitpos % BITS_PER_UNIT == 0
1170938fd1498Szrj && bitsize % BITS_PER_UNIT == 0
1171038fd1498Szrj && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
1171138fd1498Szrj {
1171238fd1498Szrj unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1171338fd1498Szrj unsigned HOST_WIDE_INT len
1171438fd1498Szrj = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
1171538fd1498Szrj bitpos / BITS_PER_UNIT);
1171638fd1498Szrj if (len > 0
1171738fd1498Szrj && len * BITS_PER_UNIT >= bitsize)
1171838fd1498Szrj {
1171938fd1498Szrj tree v = native_interpret_expr (type, b,
1172038fd1498Szrj bitsize / BITS_PER_UNIT);
1172138fd1498Szrj if (v)
1172238fd1498Szrj return v;
1172338fd1498Szrj }
1172438fd1498Szrj }
1172538fd1498Szrj }
1172638fd1498Szrj
1172738fd1498Szrj return NULL_TREE;
1172838fd1498Szrj
1172938fd1498Szrj case FMA_EXPR:
1173038fd1498Szrj /* For integers we can decompose the FMA if possible. */
1173138fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST
1173238fd1498Szrj && TREE_CODE (arg1) == INTEGER_CST)
1173338fd1498Szrj return fold_build2_loc (loc, PLUS_EXPR, type,
1173438fd1498Szrj const_binop (MULT_EXPR, arg0, arg1), arg2);
1173538fd1498Szrj if (integer_zerop (arg2))
1173638fd1498Szrj return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
1173738fd1498Szrj
1173838fd1498Szrj return fold_fma (loc, type, arg0, arg1, arg2);
1173938fd1498Szrj
1174038fd1498Szrj case VEC_PERM_EXPR:
1174138fd1498Szrj if (TREE_CODE (arg2) == VECTOR_CST)
1174238fd1498Szrj {
1174338fd1498Szrj /* Build a vector of integers from the tree mask. */
1174438fd1498Szrj vec_perm_builder builder;
1174538fd1498Szrj if (!tree_to_vec_perm_builder (&builder, arg2))
1174638fd1498Szrj return NULL_TREE;
1174738fd1498Szrj
1174838fd1498Szrj /* Create a vec_perm_indices for the integer vector. */
1174938fd1498Szrj poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
1175038fd1498Szrj bool single_arg = (op0 == op1);
1175138fd1498Szrj vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
1175238fd1498Szrj
1175338fd1498Szrj /* Check for cases that fold to OP0 or OP1 in their original
1175438fd1498Szrj element order. */
1175538fd1498Szrj if (sel.series_p (0, 1, 0, 1))
1175638fd1498Szrj return op0;
1175738fd1498Szrj if (sel.series_p (0, 1, nelts, 1))
1175838fd1498Szrj return op1;
1175938fd1498Szrj
1176038fd1498Szrj if (!single_arg)
1176138fd1498Szrj {
1176238fd1498Szrj if (sel.all_from_input_p (0))
1176338fd1498Szrj op1 = op0;
1176438fd1498Szrj else if (sel.all_from_input_p (1))
1176538fd1498Szrj {
1176638fd1498Szrj op0 = op1;
1176738fd1498Szrj sel.rotate_inputs (1);
1176838fd1498Szrj }
1176938fd1498Szrj }
1177038fd1498Szrj
1177138fd1498Szrj if ((TREE_CODE (op0) == VECTOR_CST
1177238fd1498Szrj || TREE_CODE (op0) == CONSTRUCTOR)
1177338fd1498Szrj && (TREE_CODE (op1) == VECTOR_CST
1177438fd1498Szrj || TREE_CODE (op1) == CONSTRUCTOR))
1177538fd1498Szrj {
1177638fd1498Szrj tree t = fold_vec_perm (type, op0, op1, sel);
1177738fd1498Szrj if (t != NULL_TREE)
1177838fd1498Szrj return t;
1177938fd1498Szrj }
1178038fd1498Szrj
1178138fd1498Szrj bool changed = (op0 == op1 && !single_arg);
1178238fd1498Szrj
1178338fd1498Szrj /* Generate a canonical form of the selector. */
1178438fd1498Szrj if (arg2 == op2 && sel.encoding () != builder)
1178538fd1498Szrj {
1178638fd1498Szrj /* Some targets are deficient and fail to expand a single
1178738fd1498Szrj argument permutation while still allowing an equivalent
1178838fd1498Szrj 2-argument version. */
1178938fd1498Szrj if (sel.ninputs () == 2
1179038fd1498Szrj || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
1179138fd1498Szrj op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
1179238fd1498Szrj else
1179338fd1498Szrj {
1179438fd1498Szrj vec_perm_indices sel2 (builder, 2, nelts);
1179538fd1498Szrj if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
1179638fd1498Szrj op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
1179738fd1498Szrj else
1179838fd1498Szrj /* Not directly supported with either encoding,
1179938fd1498Szrj so use the preferred form. */
1180038fd1498Szrj op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
1180138fd1498Szrj }
1180238fd1498Szrj changed = true;
1180338fd1498Szrj }
1180438fd1498Szrj
1180538fd1498Szrj if (changed)
1180638fd1498Szrj return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
1180738fd1498Szrj }
1180838fd1498Szrj return NULL_TREE;
1180938fd1498Szrj
1181038fd1498Szrj case BIT_INSERT_EXPR:
1181138fd1498Szrj /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
1181238fd1498Szrj if (TREE_CODE (arg0) == INTEGER_CST
1181338fd1498Szrj && TREE_CODE (arg1) == INTEGER_CST)
1181438fd1498Szrj {
1181538fd1498Szrj unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
1181638fd1498Szrj unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
1181738fd1498Szrj wide_int tem = (wi::to_wide (arg0)
1181838fd1498Szrj & wi::shifted_mask (bitpos, bitsize, true,
1181938fd1498Szrj TYPE_PRECISION (type)));
1182038fd1498Szrj wide_int tem2
1182138fd1498Szrj = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
1182238fd1498Szrj bitsize), bitpos);
1182338fd1498Szrj return wide_int_to_tree (type, wi::bit_or (tem, tem2));
1182438fd1498Szrj }
1182538fd1498Szrj else if (TREE_CODE (arg0) == VECTOR_CST
1182638fd1498Szrj && CONSTANT_CLASS_P (arg1)
1182738fd1498Szrj && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
1182838fd1498Szrj TREE_TYPE (arg1)))
1182938fd1498Szrj {
1183038fd1498Szrj unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
1183138fd1498Szrj unsigned HOST_WIDE_INT elsize
1183238fd1498Szrj = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
1183338fd1498Szrj if (bitpos % elsize == 0)
1183438fd1498Szrj {
1183538fd1498Szrj unsigned k = bitpos / elsize;
1183638fd1498Szrj unsigned HOST_WIDE_INT nelts;
1183738fd1498Szrj if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
1183838fd1498Szrj return arg0;
1183938fd1498Szrj else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
1184038fd1498Szrj {
1184138fd1498Szrj tree_vector_builder elts (type, nelts, 1);
1184238fd1498Szrj elts.quick_grow (nelts);
1184338fd1498Szrj for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
1184438fd1498Szrj elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
1184538fd1498Szrj return elts.build ();
1184638fd1498Szrj }
1184738fd1498Szrj }
1184838fd1498Szrj }
1184938fd1498Szrj return NULL_TREE;
1185038fd1498Szrj
1185138fd1498Szrj default:
1185238fd1498Szrj return NULL_TREE;
1185338fd1498Szrj } /* switch (code) */
1185438fd1498Szrj }
1185538fd1498Szrj
1185638fd1498Szrj /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
1185738fd1498Szrj of an array (or vector). */
1185838fd1498Szrj
1185938fd1498Szrj tree
get_array_ctor_element_at_index(tree ctor,offset_int access_index)1186038fd1498Szrj get_array_ctor_element_at_index (tree ctor, offset_int access_index)
1186138fd1498Szrj {
1186238fd1498Szrj tree index_type = NULL_TREE;
1186338fd1498Szrj offset_int low_bound = 0;
1186438fd1498Szrj
1186538fd1498Szrj if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
1186638fd1498Szrj {
1186738fd1498Szrj tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
1186838fd1498Szrj if (domain_type && TYPE_MIN_VALUE (domain_type))
1186938fd1498Szrj {
1187038fd1498Szrj /* Static constructors for variably sized objects makes no sense. */
1187138fd1498Szrj gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
1187238fd1498Szrj index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
1187338fd1498Szrj low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
1187438fd1498Szrj }
1187538fd1498Szrj }
1187638fd1498Szrj
1187738fd1498Szrj if (index_type)
1187838fd1498Szrj access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
1187938fd1498Szrj TYPE_SIGN (index_type));
1188038fd1498Szrj
1188138fd1498Szrj offset_int index = low_bound - 1;
1188238fd1498Szrj if (index_type)
1188338fd1498Szrj index = wi::ext (index, TYPE_PRECISION (index_type),
1188438fd1498Szrj TYPE_SIGN (index_type));
1188538fd1498Szrj
1188638fd1498Szrj offset_int max_index;
1188738fd1498Szrj unsigned HOST_WIDE_INT cnt;
1188838fd1498Szrj tree cfield, cval;
1188938fd1498Szrj
1189038fd1498Szrj FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1189138fd1498Szrj {
1189238fd1498Szrj /* Array constructor might explicitly set index, or specify a range,
1189338fd1498Szrj or leave index NULL meaning that it is next index after previous
1189438fd1498Szrj one. */
1189538fd1498Szrj if (cfield)
1189638fd1498Szrj {
1189738fd1498Szrj if (TREE_CODE (cfield) == INTEGER_CST)
1189838fd1498Szrj max_index = index = wi::to_offset (cfield);
1189938fd1498Szrj else
1190038fd1498Szrj {
1190138fd1498Szrj gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
1190238fd1498Szrj index = wi::to_offset (TREE_OPERAND (cfield, 0));
1190338fd1498Szrj max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
1190438fd1498Szrj }
1190538fd1498Szrj }
1190638fd1498Szrj else
1190738fd1498Szrj {
1190838fd1498Szrj index += 1;
1190938fd1498Szrj if (index_type)
1191038fd1498Szrj index = wi::ext (index, TYPE_PRECISION (index_type),
1191138fd1498Szrj TYPE_SIGN (index_type));
1191238fd1498Szrj max_index = index;
1191338fd1498Szrj }
1191438fd1498Szrj
1191538fd1498Szrj /* Do we have match? */
1191638fd1498Szrj if (wi::cmpu (access_index, index) >= 0
1191738fd1498Szrj && wi::cmpu (access_index, max_index) <= 0)
1191838fd1498Szrj return cval;
1191938fd1498Szrj }
1192038fd1498Szrj return NULL_TREE;
1192138fd1498Szrj }
1192238fd1498Szrj
1192338fd1498Szrj /* Perform constant folding and related simplification of EXPR.
1192438fd1498Szrj The related simplifications include x*1 => x, x*0 => 0, etc.,
1192538fd1498Szrj and application of the associative law.
1192638fd1498Szrj NOP_EXPR conversions may be removed freely (as long as we
1192738fd1498Szrj are careful not to change the type of the overall expression).
1192838fd1498Szrj We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
1192938fd1498Szrj but we can constant-fold them if they have constant operands. */
1193038fd1498Szrj
#ifdef ENABLE_FOLD_CHECKING
/* When fold checking is enabled, the real work lives in fold_1 and the
   checksumming wrapper below is the public "fold".  */
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  /* Ordinary expressions dispatch to the unary/binary/ternary folders
     based on operand count; return EXPR itself when nothing folds.  */
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	/* Constant index into a constant constructor: pull out the
	   element directly (placeholders would need substitution).  */
	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    tree val = get_array_ctor_element_at_index (op0,
							wi::to_offset (op1));
	    if (val)
	      return val;
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	/* All elements must already be constants.  */
	unsigned i;
	tree val;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	  if (! CONSTANT_CLASS_P (val))
	    return t;

	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
1203138fd1498Szrj
1203238fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1203338fd1498Szrj #undef fold
1203438fd1498Szrj
1203538fd1498Szrj static void fold_checksum_tree (const_tree, struct md5_ctx *,
1203638fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > *);
1203738fd1498Szrj static void fold_check_failed (const_tree, const_tree);
1203838fd1498Szrj void print_fold_checksum (const_tree);
1203938fd1498Szrj
1204038fd1498Szrj /* When --enable-checking=fold, compute a digest of expr before
1204138fd1498Szrj and after actual fold call to see if fold did not accidentally
1204238fd1498Szrj change original expr. */
1204338fd1498Szrj
1204438fd1498Szrj tree
fold(tree expr)1204538fd1498Szrj fold (tree expr)
1204638fd1498Szrj {
1204738fd1498Szrj tree ret;
1204838fd1498Szrj struct md5_ctx ctx;
1204938fd1498Szrj unsigned char checksum_before[16], checksum_after[16];
1205038fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1205138fd1498Szrj
1205238fd1498Szrj md5_init_ctx (&ctx);
1205338fd1498Szrj fold_checksum_tree (expr, &ctx, &ht);
1205438fd1498Szrj md5_finish_ctx (&ctx, checksum_before);
1205538fd1498Szrj ht.empty ();
1205638fd1498Szrj
1205738fd1498Szrj ret = fold_1 (expr);
1205838fd1498Szrj
1205938fd1498Szrj md5_init_ctx (&ctx);
1206038fd1498Szrj fold_checksum_tree (expr, &ctx, &ht);
1206138fd1498Szrj md5_finish_ctx (&ctx, checksum_after);
1206238fd1498Szrj
1206338fd1498Szrj if (memcmp (checksum_before, checksum_after, 16))
1206438fd1498Szrj fold_check_failed (expr, ret);
1206538fd1498Szrj
1206638fd1498Szrj return ret;
1206738fd1498Szrj }
1206838fd1498Szrj
1206938fd1498Szrj void
print_fold_checksum(const_tree expr)1207038fd1498Szrj print_fold_checksum (const_tree expr)
1207138fd1498Szrj {
1207238fd1498Szrj struct md5_ctx ctx;
1207338fd1498Szrj unsigned char checksum[16], cnt;
1207438fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1207538fd1498Szrj
1207638fd1498Szrj md5_init_ctx (&ctx);
1207738fd1498Szrj fold_checksum_tree (expr, &ctx, &ht);
1207838fd1498Szrj md5_finish_ctx (&ctx, checksum);
1207938fd1498Szrj for (cnt = 0; cnt < 16; ++cnt)
1208038fd1498Szrj fprintf (stderr, "%02x", checksum[cnt]);
1208138fd1498Szrj putc ('\n', stderr);
1208238fd1498Szrj }
1208338fd1498Szrj
1208438fd1498Szrj static void
fold_check_failed(const_tree expr ATTRIBUTE_UNUSED,const_tree ret ATTRIBUTE_UNUSED)1208538fd1498Szrj fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
1208638fd1498Szrj {
1208738fd1498Szrj internal_error ("fold check: original tree changed by fold");
1208838fd1498Szrj }
1208938fd1498Szrj
1209038fd1498Szrj static void
fold_checksum_tree(const_tree expr,struct md5_ctx * ctx,hash_table<nofree_ptr_hash<const tree_node>> * ht)1209138fd1498Szrj fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
1209238fd1498Szrj hash_table<nofree_ptr_hash <const tree_node> > *ht)
1209338fd1498Szrj {
1209438fd1498Szrj const tree_node **slot;
1209538fd1498Szrj enum tree_code code;
1209638fd1498Szrj union tree_node buf;
1209738fd1498Szrj int i, len;
1209838fd1498Szrj
1209938fd1498Szrj recursive_label:
1210038fd1498Szrj if (expr == NULL)
1210138fd1498Szrj return;
1210238fd1498Szrj slot = ht->find_slot (expr, INSERT);
1210338fd1498Szrj if (*slot != NULL)
1210438fd1498Szrj return;
1210538fd1498Szrj *slot = expr;
1210638fd1498Szrj code = TREE_CODE (expr);
1210738fd1498Szrj if (TREE_CODE_CLASS (code) == tcc_declaration
1210838fd1498Szrj && HAS_DECL_ASSEMBLER_NAME_P (expr))
1210938fd1498Szrj {
1211038fd1498Szrj /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
1211138fd1498Szrj memcpy ((char *) &buf, expr, tree_size (expr));
1211238fd1498Szrj SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
1211338fd1498Szrj buf.decl_with_vis.symtab_node = NULL;
1211438fd1498Szrj expr = (tree) &buf;
1211538fd1498Szrj }
1211638fd1498Szrj else if (TREE_CODE_CLASS (code) == tcc_type
1211738fd1498Szrj && (TYPE_POINTER_TO (expr)
1211838fd1498Szrj || TYPE_REFERENCE_TO (expr)
1211938fd1498Szrj || TYPE_CACHED_VALUES_P (expr)
1212038fd1498Szrj || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
1212138fd1498Szrj || TYPE_NEXT_VARIANT (expr)
1212238fd1498Szrj || TYPE_ALIAS_SET_KNOWN_P (expr)))
1212338fd1498Szrj {
1212438fd1498Szrj /* Allow these fields to be modified. */
1212538fd1498Szrj tree tmp;
1212638fd1498Szrj memcpy ((char *) &buf, expr, tree_size (expr));
1212738fd1498Szrj expr = tmp = (tree) &buf;
1212838fd1498Szrj TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
1212938fd1498Szrj TYPE_POINTER_TO (tmp) = NULL;
1213038fd1498Szrj TYPE_REFERENCE_TO (tmp) = NULL;
1213138fd1498Szrj TYPE_NEXT_VARIANT (tmp) = NULL;
1213238fd1498Szrj TYPE_ALIAS_SET (tmp) = -1;
1213338fd1498Szrj if (TYPE_CACHED_VALUES_P (tmp))
1213438fd1498Szrj {
1213538fd1498Szrj TYPE_CACHED_VALUES_P (tmp) = 0;
1213638fd1498Szrj TYPE_CACHED_VALUES (tmp) = NULL;
1213738fd1498Szrj }
1213838fd1498Szrj }
1213938fd1498Szrj md5_process_bytes (expr, tree_size (expr), ctx);
1214038fd1498Szrj if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1214138fd1498Szrj fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
1214238fd1498Szrj if (TREE_CODE_CLASS (code) != tcc_type
1214338fd1498Szrj && TREE_CODE_CLASS (code) != tcc_declaration
1214438fd1498Szrj && code != TREE_LIST
1214538fd1498Szrj && code != SSA_NAME
1214638fd1498Szrj && CODE_CONTAINS_STRUCT (code, TS_COMMON))
1214738fd1498Szrj fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
1214838fd1498Szrj switch (TREE_CODE_CLASS (code))
1214938fd1498Szrj {
1215038fd1498Szrj case tcc_constant:
1215138fd1498Szrj switch (code)
1215238fd1498Szrj {
1215338fd1498Szrj case STRING_CST:
1215438fd1498Szrj md5_process_bytes (TREE_STRING_POINTER (expr),
1215538fd1498Szrj TREE_STRING_LENGTH (expr), ctx);
1215638fd1498Szrj break;
1215738fd1498Szrj case COMPLEX_CST:
1215838fd1498Szrj fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
1215938fd1498Szrj fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
1216038fd1498Szrj break;
1216138fd1498Szrj case VECTOR_CST:
1216238fd1498Szrj len = vector_cst_encoded_nelts (expr);
1216338fd1498Szrj for (i = 0; i < len; ++i)
1216438fd1498Szrj fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
1216538fd1498Szrj break;
1216638fd1498Szrj default:
1216738fd1498Szrj break;
1216838fd1498Szrj }
1216938fd1498Szrj break;
1217038fd1498Szrj case tcc_exceptional:
1217138fd1498Szrj switch (code)
1217238fd1498Szrj {
1217338fd1498Szrj case TREE_LIST:
1217438fd1498Szrj fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
1217538fd1498Szrj fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
1217638fd1498Szrj expr = TREE_CHAIN (expr);
1217738fd1498Szrj goto recursive_label;
1217838fd1498Szrj break;
1217938fd1498Szrj case TREE_VEC:
1218038fd1498Szrj for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
1218138fd1498Szrj fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
1218238fd1498Szrj break;
1218338fd1498Szrj default:
1218438fd1498Szrj break;
1218538fd1498Szrj }
1218638fd1498Szrj break;
1218738fd1498Szrj case tcc_expression:
1218838fd1498Szrj case tcc_reference:
1218938fd1498Szrj case tcc_comparison:
1219038fd1498Szrj case tcc_unary:
1219138fd1498Szrj case tcc_binary:
1219238fd1498Szrj case tcc_statement:
1219338fd1498Szrj case tcc_vl_exp:
1219438fd1498Szrj len = TREE_OPERAND_LENGTH (expr);
1219538fd1498Szrj for (i = 0; i < len; ++i)
1219638fd1498Szrj fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
1219738fd1498Szrj break;
1219838fd1498Szrj case tcc_declaration:
1219938fd1498Szrj fold_checksum_tree (DECL_NAME (expr), ctx, ht);
1220038fd1498Szrj fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
1220138fd1498Szrj if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
1220238fd1498Szrj {
1220338fd1498Szrj fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
1220438fd1498Szrj fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
1220538fd1498Szrj fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
1220638fd1498Szrj fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
1220738fd1498Szrj fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
1220838fd1498Szrj }
1220938fd1498Szrj
1221038fd1498Szrj if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
1221138fd1498Szrj {
1221238fd1498Szrj if (TREE_CODE (expr) == FUNCTION_DECL)
1221338fd1498Szrj {
1221438fd1498Szrj fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
1221538fd1498Szrj fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
1221638fd1498Szrj }
1221738fd1498Szrj fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
1221838fd1498Szrj }
1221938fd1498Szrj break;
1222038fd1498Szrj case tcc_type:
1222138fd1498Szrj if (TREE_CODE (expr) == ENUMERAL_TYPE)
1222238fd1498Szrj fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
1222338fd1498Szrj fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
1222438fd1498Szrj fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
1222538fd1498Szrj fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
1222638fd1498Szrj fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
1222738fd1498Szrj if (INTEGRAL_TYPE_P (expr)
1222838fd1498Szrj || SCALAR_FLOAT_TYPE_P (expr))
1222938fd1498Szrj {
1223038fd1498Szrj fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
1223138fd1498Szrj fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
1223238fd1498Szrj }
1223338fd1498Szrj fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
1223438fd1498Szrj if (TREE_CODE (expr) == RECORD_TYPE
1223538fd1498Szrj || TREE_CODE (expr) == UNION_TYPE
1223638fd1498Szrj || TREE_CODE (expr) == QUAL_UNION_TYPE)
1223738fd1498Szrj fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
1223838fd1498Szrj fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
1223938fd1498Szrj break;
1224038fd1498Szrj default:
1224138fd1498Szrj break;
1224238fd1498Szrj }
1224338fd1498Szrj }
1224438fd1498Szrj
1224538fd1498Szrj /* Helper function for outputting the checksum of a tree T. When
1224638fd1498Szrj debugging with gdb, you can "define mynext" to be "next" followed
1224738fd1498Szrj by "call debug_fold_checksum (op0)", then just trace down till the
1224838fd1498Szrj outputs differ. */
1224938fd1498Szrj
1225038fd1498Szrj DEBUG_FUNCTION void
debug_fold_checksum(const_tree t)1225138fd1498Szrj debug_fold_checksum (const_tree t)
1225238fd1498Szrj {
1225338fd1498Szrj int i;
1225438fd1498Szrj unsigned char checksum[16];
1225538fd1498Szrj struct md5_ctx ctx;
1225638fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1225738fd1498Szrj
1225838fd1498Szrj md5_init_ctx (&ctx);
1225938fd1498Szrj fold_checksum_tree (t, &ctx, &ht);
1226038fd1498Szrj md5_finish_ctx (&ctx, checksum);
1226138fd1498Szrj ht.empty ();
1226238fd1498Szrj
1226338fd1498Szrj for (i = 0; i < 16; i++)
1226438fd1498Szrj fprintf (stderr, "%d ", checksum[i]);
1226538fd1498Szrj
1226638fd1498Szrj fprintf (stderr, "\n");
1226738fd1498Szrj }
1226838fd1498Szrj
1226938fd1498Szrj #endif
1227038fd1498Szrj
1227138fd1498Szrj /* Fold a unary tree expression with code CODE of type TYPE with an
1227238fd1498Szrj operand OP0. LOC is the location of the resulting expression.
1227338fd1498Szrj Return a folded expression if successful. Otherwise, return a tree
1227438fd1498Szrj expression with code CODE of type TYPE with an operand OP0. */
1227538fd1498Szrj
1227638fd1498Szrj tree
fold_build1_loc(location_t loc,enum tree_code code,tree type,tree op0 MEM_STAT_DECL)1227738fd1498Szrj fold_build1_loc (location_t loc,
1227838fd1498Szrj enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
1227938fd1498Szrj {
1228038fd1498Szrj tree tem;
1228138fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1228238fd1498Szrj unsigned char checksum_before[16], checksum_after[16];
1228338fd1498Szrj struct md5_ctx ctx;
1228438fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1228538fd1498Szrj
1228638fd1498Szrj md5_init_ctx (&ctx);
1228738fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1228838fd1498Szrj md5_finish_ctx (&ctx, checksum_before);
1228938fd1498Szrj ht.empty ();
1229038fd1498Szrj #endif
1229138fd1498Szrj
1229238fd1498Szrj tem = fold_unary_loc (loc, code, type, op0);
1229338fd1498Szrj if (!tem)
1229438fd1498Szrj tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
1229538fd1498Szrj
1229638fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1229738fd1498Szrj md5_init_ctx (&ctx);
1229838fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1229938fd1498Szrj md5_finish_ctx (&ctx, checksum_after);
1230038fd1498Szrj
1230138fd1498Szrj if (memcmp (checksum_before, checksum_after, 16))
1230238fd1498Szrj fold_check_failed (op0, tem);
1230338fd1498Szrj #endif
1230438fd1498Szrj return tem;
1230538fd1498Szrj }
1230638fd1498Szrj
1230738fd1498Szrj /* Fold a binary tree expression with code CODE of type TYPE with
1230838fd1498Szrj operands OP0 and OP1. LOC is the location of the resulting
1230938fd1498Szrj expression. Return a folded expression if successful. Otherwise,
1231038fd1498Szrj return a tree expression with code CODE of type TYPE with operands
1231138fd1498Szrj OP0 and OP1. */
1231238fd1498Szrj
1231338fd1498Szrj tree
fold_build2_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1 MEM_STAT_DECL)1231438fd1498Szrj fold_build2_loc (location_t loc,
1231538fd1498Szrj enum tree_code code, tree type, tree op0, tree op1
1231638fd1498Szrj MEM_STAT_DECL)
1231738fd1498Szrj {
1231838fd1498Szrj tree tem;
1231938fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1232038fd1498Szrj unsigned char checksum_before_op0[16],
1232138fd1498Szrj checksum_before_op1[16],
1232238fd1498Szrj checksum_after_op0[16],
1232338fd1498Szrj checksum_after_op1[16];
1232438fd1498Szrj struct md5_ctx ctx;
1232538fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1232638fd1498Szrj
1232738fd1498Szrj md5_init_ctx (&ctx);
1232838fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1232938fd1498Szrj md5_finish_ctx (&ctx, checksum_before_op0);
1233038fd1498Szrj ht.empty ();
1233138fd1498Szrj
1233238fd1498Szrj md5_init_ctx (&ctx);
1233338fd1498Szrj fold_checksum_tree (op1, &ctx, &ht);
1233438fd1498Szrj md5_finish_ctx (&ctx, checksum_before_op1);
1233538fd1498Szrj ht.empty ();
1233638fd1498Szrj #endif
1233738fd1498Szrj
1233838fd1498Szrj tem = fold_binary_loc (loc, code, type, op0, op1);
1233938fd1498Szrj if (!tem)
1234038fd1498Szrj tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
1234138fd1498Szrj
1234238fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1234338fd1498Szrj md5_init_ctx (&ctx);
1234438fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1234538fd1498Szrj md5_finish_ctx (&ctx, checksum_after_op0);
1234638fd1498Szrj ht.empty ();
1234738fd1498Szrj
1234838fd1498Szrj if (memcmp (checksum_before_op0, checksum_after_op0, 16))
1234938fd1498Szrj fold_check_failed (op0, tem);
1235038fd1498Szrj
1235138fd1498Szrj md5_init_ctx (&ctx);
1235238fd1498Szrj fold_checksum_tree (op1, &ctx, &ht);
1235338fd1498Szrj md5_finish_ctx (&ctx, checksum_after_op1);
1235438fd1498Szrj
1235538fd1498Szrj if (memcmp (checksum_before_op1, checksum_after_op1, 16))
1235638fd1498Szrj fold_check_failed (op1, tem);
1235738fd1498Szrj #endif
1235838fd1498Szrj return tem;
1235938fd1498Szrj }
1236038fd1498Szrj
1236138fd1498Szrj /* Fold a ternary tree expression with code CODE of type TYPE with
1236238fd1498Szrj operands OP0, OP1, and OP2. Return a folded expression if
1236338fd1498Szrj successful. Otherwise, return a tree expression with code CODE of
1236438fd1498Szrj type TYPE with operands OP0, OP1, and OP2. */
1236538fd1498Szrj
1236638fd1498Szrj tree
fold_build3_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2 MEM_STAT_DECL)1236738fd1498Szrj fold_build3_loc (location_t loc, enum tree_code code, tree type,
1236838fd1498Szrj tree op0, tree op1, tree op2 MEM_STAT_DECL)
1236938fd1498Szrj {
1237038fd1498Szrj tree tem;
1237138fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1237238fd1498Szrj unsigned char checksum_before_op0[16],
1237338fd1498Szrj checksum_before_op1[16],
1237438fd1498Szrj checksum_before_op2[16],
1237538fd1498Szrj checksum_after_op0[16],
1237638fd1498Szrj checksum_after_op1[16],
1237738fd1498Szrj checksum_after_op2[16];
1237838fd1498Szrj struct md5_ctx ctx;
1237938fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1238038fd1498Szrj
1238138fd1498Szrj md5_init_ctx (&ctx);
1238238fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1238338fd1498Szrj md5_finish_ctx (&ctx, checksum_before_op0);
1238438fd1498Szrj ht.empty ();
1238538fd1498Szrj
1238638fd1498Szrj md5_init_ctx (&ctx);
1238738fd1498Szrj fold_checksum_tree (op1, &ctx, &ht);
1238838fd1498Szrj md5_finish_ctx (&ctx, checksum_before_op1);
1238938fd1498Szrj ht.empty ();
1239038fd1498Szrj
1239138fd1498Szrj md5_init_ctx (&ctx);
1239238fd1498Szrj fold_checksum_tree (op2, &ctx, &ht);
1239338fd1498Szrj md5_finish_ctx (&ctx, checksum_before_op2);
1239438fd1498Szrj ht.empty ();
1239538fd1498Szrj #endif
1239638fd1498Szrj
1239738fd1498Szrj gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
1239838fd1498Szrj tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
1239938fd1498Szrj if (!tem)
1240038fd1498Szrj tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
1240138fd1498Szrj
1240238fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1240338fd1498Szrj md5_init_ctx (&ctx);
1240438fd1498Szrj fold_checksum_tree (op0, &ctx, &ht);
1240538fd1498Szrj md5_finish_ctx (&ctx, checksum_after_op0);
1240638fd1498Szrj ht.empty ();
1240738fd1498Szrj
1240838fd1498Szrj if (memcmp (checksum_before_op0, checksum_after_op0, 16))
1240938fd1498Szrj fold_check_failed (op0, tem);
1241038fd1498Szrj
1241138fd1498Szrj md5_init_ctx (&ctx);
1241238fd1498Szrj fold_checksum_tree (op1, &ctx, &ht);
1241338fd1498Szrj md5_finish_ctx (&ctx, checksum_after_op1);
1241438fd1498Szrj ht.empty ();
1241538fd1498Szrj
1241638fd1498Szrj if (memcmp (checksum_before_op1, checksum_after_op1, 16))
1241738fd1498Szrj fold_check_failed (op1, tem);
1241838fd1498Szrj
1241938fd1498Szrj md5_init_ctx (&ctx);
1242038fd1498Szrj fold_checksum_tree (op2, &ctx, &ht);
1242138fd1498Szrj md5_finish_ctx (&ctx, checksum_after_op2);
1242238fd1498Szrj
1242338fd1498Szrj if (memcmp (checksum_before_op2, checksum_after_op2, 16))
1242438fd1498Szrj fold_check_failed (op2, tem);
1242538fd1498Szrj #endif
1242638fd1498Szrj return tem;
1242738fd1498Szrj }
1242838fd1498Szrj
1242938fd1498Szrj /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
1243038fd1498Szrj arguments in ARGARRAY, and a null static chain.
1243138fd1498Szrj Return a folded expression if successful. Otherwise, return a CALL_EXPR
1243238fd1498Szrj of type TYPE from the given operands as constructed by build_call_array. */
1243338fd1498Szrj
1243438fd1498Szrj tree
fold_build_call_array_loc(location_t loc,tree type,tree fn,int nargs,tree * argarray)1243538fd1498Szrj fold_build_call_array_loc (location_t loc, tree type, tree fn,
1243638fd1498Szrj int nargs, tree *argarray)
1243738fd1498Szrj {
1243838fd1498Szrj tree tem;
1243938fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1244038fd1498Szrj unsigned char checksum_before_fn[16],
1244138fd1498Szrj checksum_before_arglist[16],
1244238fd1498Szrj checksum_after_fn[16],
1244338fd1498Szrj checksum_after_arglist[16];
1244438fd1498Szrj struct md5_ctx ctx;
1244538fd1498Szrj hash_table<nofree_ptr_hash<const tree_node> > ht (32);
1244638fd1498Szrj int i;
1244738fd1498Szrj
1244838fd1498Szrj md5_init_ctx (&ctx);
1244938fd1498Szrj fold_checksum_tree (fn, &ctx, &ht);
1245038fd1498Szrj md5_finish_ctx (&ctx, checksum_before_fn);
1245138fd1498Szrj ht.empty ();
1245238fd1498Szrj
1245338fd1498Szrj md5_init_ctx (&ctx);
1245438fd1498Szrj for (i = 0; i < nargs; i++)
1245538fd1498Szrj fold_checksum_tree (argarray[i], &ctx, &ht);
1245638fd1498Szrj md5_finish_ctx (&ctx, checksum_before_arglist);
1245738fd1498Szrj ht.empty ();
1245838fd1498Szrj #endif
1245938fd1498Szrj
1246038fd1498Szrj tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
1246138fd1498Szrj if (!tem)
1246238fd1498Szrj tem = build_call_array_loc (loc, type, fn, nargs, argarray);
1246338fd1498Szrj
1246438fd1498Szrj #ifdef ENABLE_FOLD_CHECKING
1246538fd1498Szrj md5_init_ctx (&ctx);
1246638fd1498Szrj fold_checksum_tree (fn, &ctx, &ht);
1246738fd1498Szrj md5_finish_ctx (&ctx, checksum_after_fn);
1246838fd1498Szrj ht.empty ();
1246938fd1498Szrj
1247038fd1498Szrj if (memcmp (checksum_before_fn, checksum_after_fn, 16))
1247138fd1498Szrj fold_check_failed (fn, tem);
1247238fd1498Szrj
1247338fd1498Szrj md5_init_ctx (&ctx);
1247438fd1498Szrj for (i = 0; i < nargs; i++)
1247538fd1498Szrj fold_checksum_tree (argarray[i], &ctx, &ht);
1247638fd1498Szrj md5_finish_ctx (&ctx, checksum_after_arglist);
1247738fd1498Szrj
1247838fd1498Szrj if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
1247938fd1498Szrj fold_check_failed (NULL_TREE, tem);
1248038fd1498Szrj #endif
1248138fd1498Szrj return tem;
1248238fd1498Szrj }
1248338fd1498Szrj
1248438fd1498Szrj /* Perform constant folding and related simplification of initializer
1248538fd1498Szrj expression EXPR. These behave identically to "fold_buildN" but ignore
1248638fd1498Szrj potential run-time traps and exceptions that fold must preserve. */
1248738fd1498Szrj
1248838fd1498Szrj #define START_FOLD_INIT \
1248938fd1498Szrj int saved_signaling_nans = flag_signaling_nans;\
1249038fd1498Szrj int saved_trapping_math = flag_trapping_math;\
1249138fd1498Szrj int saved_rounding_math = flag_rounding_math;\
1249238fd1498Szrj int saved_trapv = flag_trapv;\
1249338fd1498Szrj int saved_folding_initializer = folding_initializer;\
1249438fd1498Szrj flag_signaling_nans = 0;\
1249538fd1498Szrj flag_trapping_math = 0;\
1249638fd1498Szrj flag_rounding_math = 0;\
1249738fd1498Szrj flag_trapv = 0;\
1249838fd1498Szrj folding_initializer = 1;
1249938fd1498Szrj
1250038fd1498Szrj #define END_FOLD_INIT \
1250138fd1498Szrj flag_signaling_nans = saved_signaling_nans;\
1250238fd1498Szrj flag_trapping_math = saved_trapping_math;\
1250338fd1498Szrj flag_rounding_math = saved_rounding_math;\
1250438fd1498Szrj flag_trapv = saved_trapv;\
1250538fd1498Szrj folding_initializer = saved_folding_initializer;
1250638fd1498Szrj
1250738fd1498Szrj tree
fold_build1_initializer_loc(location_t loc,enum tree_code code,tree type,tree op)1250838fd1498Szrj fold_build1_initializer_loc (location_t loc, enum tree_code code,
1250938fd1498Szrj tree type, tree op)
1251038fd1498Szrj {
1251138fd1498Szrj tree result;
1251238fd1498Szrj START_FOLD_INIT;
1251338fd1498Szrj
1251438fd1498Szrj result = fold_build1_loc (loc, code, type, op);
1251538fd1498Szrj
1251638fd1498Szrj END_FOLD_INIT;
1251738fd1498Szrj return result;
1251838fd1498Szrj }
1251938fd1498Szrj
1252038fd1498Szrj tree
fold_build2_initializer_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1)1252138fd1498Szrj fold_build2_initializer_loc (location_t loc, enum tree_code code,
1252238fd1498Szrj tree type, tree op0, tree op1)
1252338fd1498Szrj {
1252438fd1498Szrj tree result;
1252538fd1498Szrj START_FOLD_INIT;
1252638fd1498Szrj
1252738fd1498Szrj result = fold_build2_loc (loc, code, type, op0, op1);
1252838fd1498Szrj
1252938fd1498Szrj END_FOLD_INIT;
1253038fd1498Szrj return result;
1253138fd1498Szrj }
1253238fd1498Szrj
1253338fd1498Szrj tree
fold_build_call_array_initializer_loc(location_t loc,tree type,tree fn,int nargs,tree * argarray)1253438fd1498Szrj fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
1253538fd1498Szrj int nargs, tree *argarray)
1253638fd1498Szrj {
1253738fd1498Szrj tree result;
1253838fd1498Szrj START_FOLD_INIT;
1253938fd1498Szrj
1254038fd1498Szrj result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
1254138fd1498Szrj
1254238fd1498Szrj END_FOLD_INIT;
1254338fd1498Szrj return result;
1254438fd1498Szrj }
1254538fd1498Szrj
1254638fd1498Szrj #undef START_FOLD_INIT
1254738fd1498Szrj #undef END_FOLD_INIT
1254838fd1498Szrj
1254938fd1498Szrj /* Determine if first argument is a multiple of second argument. Return 0 if
1255038fd1498Szrj it is not, or we cannot easily determined it to be.
1255138fd1498Szrj
1255238fd1498Szrj An example of the sort of thing we care about (at this point; this routine
1255338fd1498Szrj could surely be made more general, and expanded to do what the *_DIV_EXPR's
1255438fd1498Szrj fold cases do now) is discovering that
1255538fd1498Szrj
1255638fd1498Szrj SAVE_EXPR (I) * SAVE_EXPR (J * 8)
1255738fd1498Szrj
1255838fd1498Szrj is a multiple of
1255938fd1498Szrj
1256038fd1498Szrj SAVE_EXPR (J * 8)
1256138fd1498Szrj
1256238fd1498Szrj when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
1256338fd1498Szrj
1256438fd1498Szrj This code also handles discovering that
1256538fd1498Szrj
1256638fd1498Szrj SAVE_EXPR (I) * SAVE_EXPR (J * 8)
1256738fd1498Szrj
1256838fd1498Szrj is a multiple of 8 so we don't have to worry about dealing with a
1256938fd1498Szrj possible remainder.
1257038fd1498Szrj
1257138fd1498Szrj Note that we *look* inside a SAVE_EXPR only to determine how it was
1257238fd1498Szrj calculated; it is not safe for fold to do much of anything else with the
1257338fd1498Szrj internals of a SAVE_EXPR, since it cannot know when it will be evaluated
1257438fd1498Szrj at run time. For example, the latter example above *cannot* be implemented
1257538fd1498Szrj as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
1257638fd1498Szrj evaluation time of the original SAVE_EXPR is not necessarily the same at
1257738fd1498Szrj the time the new expression is evaluated. The only optimization of this
1257838fd1498Szrj sort that would be valid is changing
1257938fd1498Szrj
1258038fd1498Szrj SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
1258138fd1498Szrj
1258238fd1498Szrj divided by 8 to
1258338fd1498Szrj
1258438fd1498Szrj SAVE_EXPR (I) * SAVE_EXPR (J)
1258538fd1498Szrj
1258638fd1498Szrj (where the same SAVE_EXPR (J) is used in the original and the
1258738fd1498Szrj transformed version). */
1258838fd1498Szrj
1258938fd1498Szrj int
multiple_of_p(tree type,const_tree top,const_tree bottom)1259038fd1498Szrj multiple_of_p (tree type, const_tree top, const_tree bottom)
1259138fd1498Szrj {
1259238fd1498Szrj gimple *stmt;
1259338fd1498Szrj tree t1, op1, op2;
1259438fd1498Szrj
1259538fd1498Szrj if (operand_equal_p (top, bottom, 0))
1259638fd1498Szrj return 1;
1259738fd1498Szrj
1259838fd1498Szrj if (TREE_CODE (type) != INTEGER_TYPE)
1259938fd1498Szrj return 0;
1260038fd1498Szrj
1260138fd1498Szrj switch (TREE_CODE (top))
1260238fd1498Szrj {
1260338fd1498Szrj case BIT_AND_EXPR:
1260438fd1498Szrj /* Bitwise and provides a power of two multiple. If the mask is
1260538fd1498Szrj a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
1260638fd1498Szrj if (!integer_pow2p (bottom))
1260738fd1498Szrj return 0;
1260838fd1498Szrj return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
1260938fd1498Szrj || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
1261038fd1498Szrj
1261138fd1498Szrj case MULT_EXPR:
1261238fd1498Szrj if (TREE_CODE (bottom) == INTEGER_CST)
1261338fd1498Szrj {
1261438fd1498Szrj op1 = TREE_OPERAND (top, 0);
1261538fd1498Szrj op2 = TREE_OPERAND (top, 1);
1261638fd1498Szrj if (TREE_CODE (op1) == INTEGER_CST)
1261738fd1498Szrj std::swap (op1, op2);
1261838fd1498Szrj if (TREE_CODE (op2) == INTEGER_CST)
1261938fd1498Szrj {
1262038fd1498Szrj if (multiple_of_p (type, op2, bottom))
1262138fd1498Szrj return 1;
1262238fd1498Szrj /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
1262338fd1498Szrj if (multiple_of_p (type, bottom, op2))
1262438fd1498Szrj {
1262538fd1498Szrj widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
1262638fd1498Szrj wi::to_widest (op2));
1262738fd1498Szrj if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
1262838fd1498Szrj {
1262938fd1498Szrj op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
1263038fd1498Szrj return multiple_of_p (type, op1, op2);
1263138fd1498Szrj }
1263238fd1498Szrj }
1263338fd1498Szrj return multiple_of_p (type, op1, bottom);
1263438fd1498Szrj }
1263538fd1498Szrj }
1263638fd1498Szrj return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
1263738fd1498Szrj || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
1263838fd1498Szrj
1263938fd1498Szrj case MINUS_EXPR:
1264038fd1498Szrj /* It is impossible to prove if op0 - op1 is multiple of bottom
1264138fd1498Szrj precisely, so be conservative here checking if both op0 and op1
1264238fd1498Szrj are multiple of bottom. Note we check the second operand first
1264338fd1498Szrj since it's usually simpler. */
1264438fd1498Szrj return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
1264538fd1498Szrj && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
1264638fd1498Szrj
1264738fd1498Szrj case PLUS_EXPR:
1264838fd1498Szrj /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
1264938fd1498Szrj as op0 - 3 if the expression has unsigned type. For example,
1265038fd1498Szrj (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
1265138fd1498Szrj op1 = TREE_OPERAND (top, 1);
1265238fd1498Szrj if (TYPE_UNSIGNED (type)
1265338fd1498Szrj && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
1265438fd1498Szrj op1 = fold_build1 (NEGATE_EXPR, type, op1);
1265538fd1498Szrj return (multiple_of_p (type, op1, bottom)
1265638fd1498Szrj && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
1265738fd1498Szrj
1265838fd1498Szrj case LSHIFT_EXPR:
1265938fd1498Szrj if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
1266038fd1498Szrj {
1266138fd1498Szrj op1 = TREE_OPERAND (top, 1);
1266238fd1498Szrj /* const_binop may not detect overflow correctly,
1266338fd1498Szrj so check for it explicitly here. */
1266438fd1498Szrj if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
1266538fd1498Szrj wi::to_wide (op1))
1266638fd1498Szrj && (t1 = fold_convert (type,
1266738fd1498Szrj const_binop (LSHIFT_EXPR, size_one_node,
1266838fd1498Szrj op1))) != 0
1266938fd1498Szrj && !TREE_OVERFLOW (t1))
1267038fd1498Szrj return multiple_of_p (type, t1, bottom);
1267138fd1498Szrj }
1267238fd1498Szrj return 0;
1267338fd1498Szrj
1267438fd1498Szrj case NOP_EXPR:
1267538fd1498Szrj /* Can't handle conversions from non-integral or wider integral type. */
1267638fd1498Szrj if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
1267738fd1498Szrj || (TYPE_PRECISION (type)
1267838fd1498Szrj < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
1267938fd1498Szrj return 0;
1268038fd1498Szrj
1268138fd1498Szrj /* fall through */
1268238fd1498Szrj
1268338fd1498Szrj case SAVE_EXPR:
1268438fd1498Szrj return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
1268538fd1498Szrj
1268638fd1498Szrj case COND_EXPR:
1268738fd1498Szrj return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
1268838fd1498Szrj && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
1268938fd1498Szrj
1269038fd1498Szrj case INTEGER_CST:
1269138fd1498Szrj if (TREE_CODE (bottom) != INTEGER_CST
1269238fd1498Szrj || integer_zerop (bottom)
1269338fd1498Szrj || (TYPE_UNSIGNED (type)
1269438fd1498Szrj && (tree_int_cst_sgn (top) < 0
1269538fd1498Szrj || tree_int_cst_sgn (bottom) < 0)))
1269638fd1498Szrj return 0;
1269738fd1498Szrj return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
1269838fd1498Szrj SIGNED);
1269938fd1498Szrj
1270038fd1498Szrj case SSA_NAME:
1270138fd1498Szrj if (TREE_CODE (bottom) == INTEGER_CST
1270238fd1498Szrj && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
1270338fd1498Szrj && gimple_code (stmt) == GIMPLE_ASSIGN)
1270438fd1498Szrj {
1270538fd1498Szrj enum tree_code code = gimple_assign_rhs_code (stmt);
1270638fd1498Szrj
1270738fd1498Szrj /* Check for special cases to see if top is defined as multiple
1270838fd1498Szrj of bottom:
1270938fd1498Szrj
1271038fd1498Szrj top = (X & ~(bottom - 1) ; bottom is power of 2
1271138fd1498Szrj
1271238fd1498Szrj or
1271338fd1498Szrj
1271438fd1498Szrj Y = X % bottom
1271538fd1498Szrj top = X - Y. */
1271638fd1498Szrj if (code == BIT_AND_EXPR
1271738fd1498Szrj && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
1271838fd1498Szrj && TREE_CODE (op2) == INTEGER_CST
1271938fd1498Szrj && integer_pow2p (bottom)
1272038fd1498Szrj && wi::multiple_of_p (wi::to_widest (op2),
1272138fd1498Szrj wi::to_widest (bottom), UNSIGNED))
1272238fd1498Szrj return 1;
1272338fd1498Szrj
1272438fd1498Szrj op1 = gimple_assign_rhs1 (stmt);
1272538fd1498Szrj if (code == MINUS_EXPR
1272638fd1498Szrj && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
1272738fd1498Szrj && TREE_CODE (op2) == SSA_NAME
1272838fd1498Szrj && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
1272938fd1498Szrj && gimple_code (stmt) == GIMPLE_ASSIGN
1273038fd1498Szrj && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
1273138fd1498Szrj && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
1273238fd1498Szrj && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
1273338fd1498Szrj return 1;
1273438fd1498Szrj }
1273538fd1498Szrj
1273638fd1498Szrj /* fall through */
1273738fd1498Szrj
1273838fd1498Szrj default:
1273938fd1498Szrj if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
1274038fd1498Szrj return multiple_p (wi::to_poly_widest (top),
1274138fd1498Szrj wi::to_poly_widest (bottom));
1274238fd1498Szrj
1274338fd1498Szrj return 0;
1274438fd1498Szrj }
1274538fd1498Szrj }
1274638fd1498Szrj
/* Poison direct calls of tree_expr_nonnegative_warnv_p within the
   mutually recursive predicates below: recursion must go through
   RECURSE so that the DEPTH counter is incremented.  */
#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

/* Recurse on X with the enclosing function's STRICT_OVERFLOW_P and one
   deeper nesting depth.  The parenthesized function name prevents
   expansion of the poisoning macro above.  */
#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
1275238fd1498Szrj
1275338fd1498Szrj /* Return true if CODE or TYPE is known to be non-negative. */
1275438fd1498Szrj
1275538fd1498Szrj static bool
tree_simple_nonnegative_warnv_p(enum tree_code code,tree type)1275638fd1498Szrj tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
1275738fd1498Szrj {
1275838fd1498Szrj if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
1275938fd1498Szrj && truth_value_p (code))
1276038fd1498Szrj /* Truth values evaluate to 0 or 1, which is nonnegative unless we
1276138fd1498Szrj have a signed:1 type (where the value is -1 and 0). */
1276238fd1498Szrj return true;
1276338fd1498Szrj return false;
1276438fd1498Szrj }
1276538fd1498Szrj
1276638fd1498Szrj /* Return true if (CODE OP0) is known to be non-negative. If the return
1276738fd1498Szrj value is based on the assumption that signed overflow is undefined,
1276838fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1276938fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1277038fd1498Szrj
1277138fd1498Szrj bool
tree_unary_nonnegative_warnv_p(enum tree_code code,tree type,tree op0,bool * strict_overflow_p,int depth)1277238fd1498Szrj tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
1277338fd1498Szrj bool *strict_overflow_p, int depth)
1277438fd1498Szrj {
1277538fd1498Szrj if (TYPE_UNSIGNED (type))
1277638fd1498Szrj return true;
1277738fd1498Szrj
1277838fd1498Szrj switch (code)
1277938fd1498Szrj {
1278038fd1498Szrj case ABS_EXPR:
1278138fd1498Szrj /* We can't return 1 if flag_wrapv is set because
1278238fd1498Szrj ABS_EXPR<INT_MIN> = INT_MIN. */
1278338fd1498Szrj if (!ANY_INTEGRAL_TYPE_P (type))
1278438fd1498Szrj return true;
1278538fd1498Szrj if (TYPE_OVERFLOW_UNDEFINED (type))
1278638fd1498Szrj {
1278738fd1498Szrj *strict_overflow_p = true;
1278838fd1498Szrj return true;
1278938fd1498Szrj }
1279038fd1498Szrj break;
1279138fd1498Szrj
1279238fd1498Szrj case NON_LVALUE_EXPR:
1279338fd1498Szrj case FLOAT_EXPR:
1279438fd1498Szrj case FIX_TRUNC_EXPR:
1279538fd1498Szrj return RECURSE (op0);
1279638fd1498Szrj
1279738fd1498Szrj CASE_CONVERT:
1279838fd1498Szrj {
1279938fd1498Szrj tree inner_type = TREE_TYPE (op0);
1280038fd1498Szrj tree outer_type = type;
1280138fd1498Szrj
1280238fd1498Szrj if (TREE_CODE (outer_type) == REAL_TYPE)
1280338fd1498Szrj {
1280438fd1498Szrj if (TREE_CODE (inner_type) == REAL_TYPE)
1280538fd1498Szrj return RECURSE (op0);
1280638fd1498Szrj if (INTEGRAL_TYPE_P (inner_type))
1280738fd1498Szrj {
1280838fd1498Szrj if (TYPE_UNSIGNED (inner_type))
1280938fd1498Szrj return true;
1281038fd1498Szrj return RECURSE (op0);
1281138fd1498Szrj }
1281238fd1498Szrj }
1281338fd1498Szrj else if (INTEGRAL_TYPE_P (outer_type))
1281438fd1498Szrj {
1281538fd1498Szrj if (TREE_CODE (inner_type) == REAL_TYPE)
1281638fd1498Szrj return RECURSE (op0);
1281738fd1498Szrj if (INTEGRAL_TYPE_P (inner_type))
1281838fd1498Szrj return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
1281938fd1498Szrj && TYPE_UNSIGNED (inner_type);
1282038fd1498Szrj }
1282138fd1498Szrj }
1282238fd1498Szrj break;
1282338fd1498Szrj
1282438fd1498Szrj default:
1282538fd1498Szrj return tree_simple_nonnegative_warnv_p (code, type);
1282638fd1498Szrj }
1282738fd1498Szrj
1282838fd1498Szrj /* We don't know sign of `t', so be conservative and return false. */
1282938fd1498Szrj return false;
1283038fd1498Szrj }
1283138fd1498Szrj
1283238fd1498Szrj /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
1283338fd1498Szrj value is based on the assumption that signed overflow is undefined,
1283438fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1283538fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1283638fd1498Szrj
1283738fd1498Szrj bool
tree_binary_nonnegative_warnv_p(enum tree_code code,tree type,tree op0,tree op1,bool * strict_overflow_p,int depth)1283838fd1498Szrj tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
1283938fd1498Szrj tree op1, bool *strict_overflow_p,
1284038fd1498Szrj int depth)
1284138fd1498Szrj {
1284238fd1498Szrj if (TYPE_UNSIGNED (type))
1284338fd1498Szrj return true;
1284438fd1498Szrj
1284538fd1498Szrj switch (code)
1284638fd1498Szrj {
1284738fd1498Szrj case POINTER_PLUS_EXPR:
1284838fd1498Szrj case PLUS_EXPR:
1284938fd1498Szrj if (FLOAT_TYPE_P (type))
1285038fd1498Szrj return RECURSE (op0) && RECURSE (op1);
1285138fd1498Szrj
1285238fd1498Szrj /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
1285338fd1498Szrj both unsigned and at least 2 bits shorter than the result. */
1285438fd1498Szrj if (TREE_CODE (type) == INTEGER_TYPE
1285538fd1498Szrj && TREE_CODE (op0) == NOP_EXPR
1285638fd1498Szrj && TREE_CODE (op1) == NOP_EXPR)
1285738fd1498Szrj {
1285838fd1498Szrj tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
1285938fd1498Szrj tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
1286038fd1498Szrj if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
1286138fd1498Szrj && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
1286238fd1498Szrj {
1286338fd1498Szrj unsigned int prec = MAX (TYPE_PRECISION (inner1),
1286438fd1498Szrj TYPE_PRECISION (inner2)) + 1;
1286538fd1498Szrj return prec < TYPE_PRECISION (type);
1286638fd1498Szrj }
1286738fd1498Szrj }
1286838fd1498Szrj break;
1286938fd1498Szrj
1287038fd1498Szrj case MULT_EXPR:
1287138fd1498Szrj if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1287238fd1498Szrj {
1287338fd1498Szrj /* x * x is always non-negative for floating point x
1287438fd1498Szrj or without overflow. */
1287538fd1498Szrj if (operand_equal_p (op0, op1, 0)
1287638fd1498Szrj || (RECURSE (op0) && RECURSE (op1)))
1287738fd1498Szrj {
1287838fd1498Szrj if (ANY_INTEGRAL_TYPE_P (type)
1287938fd1498Szrj && TYPE_OVERFLOW_UNDEFINED (type))
1288038fd1498Szrj *strict_overflow_p = true;
1288138fd1498Szrj return true;
1288238fd1498Szrj }
1288338fd1498Szrj }
1288438fd1498Szrj
1288538fd1498Szrj /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
1288638fd1498Szrj both unsigned and their total bits is shorter than the result. */
1288738fd1498Szrj if (TREE_CODE (type) == INTEGER_TYPE
1288838fd1498Szrj && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
1288938fd1498Szrj && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
1289038fd1498Szrj {
1289138fd1498Szrj tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
1289238fd1498Szrj ? TREE_TYPE (TREE_OPERAND (op0, 0))
1289338fd1498Szrj : TREE_TYPE (op0);
1289438fd1498Szrj tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
1289538fd1498Szrj ? TREE_TYPE (TREE_OPERAND (op1, 0))
1289638fd1498Szrj : TREE_TYPE (op1);
1289738fd1498Szrj
1289838fd1498Szrj bool unsigned0 = TYPE_UNSIGNED (inner0);
1289938fd1498Szrj bool unsigned1 = TYPE_UNSIGNED (inner1);
1290038fd1498Szrj
1290138fd1498Szrj if (TREE_CODE (op0) == INTEGER_CST)
1290238fd1498Szrj unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
1290338fd1498Szrj
1290438fd1498Szrj if (TREE_CODE (op1) == INTEGER_CST)
1290538fd1498Szrj unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
1290638fd1498Szrj
1290738fd1498Szrj if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
1290838fd1498Szrj && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
1290938fd1498Szrj {
1291038fd1498Szrj unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
1291138fd1498Szrj ? tree_int_cst_min_precision (op0, UNSIGNED)
1291238fd1498Szrj : TYPE_PRECISION (inner0);
1291338fd1498Szrj
1291438fd1498Szrj unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
1291538fd1498Szrj ? tree_int_cst_min_precision (op1, UNSIGNED)
1291638fd1498Szrj : TYPE_PRECISION (inner1);
1291738fd1498Szrj
1291838fd1498Szrj return precision0 + precision1 < TYPE_PRECISION (type);
1291938fd1498Szrj }
1292038fd1498Szrj }
1292138fd1498Szrj return false;
1292238fd1498Szrj
1292338fd1498Szrj case BIT_AND_EXPR:
1292438fd1498Szrj case MAX_EXPR:
1292538fd1498Szrj return RECURSE (op0) || RECURSE (op1);
1292638fd1498Szrj
1292738fd1498Szrj case BIT_IOR_EXPR:
1292838fd1498Szrj case BIT_XOR_EXPR:
1292938fd1498Szrj case MIN_EXPR:
1293038fd1498Szrj case RDIV_EXPR:
1293138fd1498Szrj case TRUNC_DIV_EXPR:
1293238fd1498Szrj case CEIL_DIV_EXPR:
1293338fd1498Szrj case FLOOR_DIV_EXPR:
1293438fd1498Szrj case ROUND_DIV_EXPR:
1293538fd1498Szrj return RECURSE (op0) && RECURSE (op1);
1293638fd1498Szrj
1293738fd1498Szrj case TRUNC_MOD_EXPR:
1293838fd1498Szrj return RECURSE (op0);
1293938fd1498Szrj
1294038fd1498Szrj case FLOOR_MOD_EXPR:
1294138fd1498Szrj return RECURSE (op1);
1294238fd1498Szrj
1294338fd1498Szrj case CEIL_MOD_EXPR:
1294438fd1498Szrj case ROUND_MOD_EXPR:
1294538fd1498Szrj default:
1294638fd1498Szrj return tree_simple_nonnegative_warnv_p (code, type);
1294738fd1498Szrj }
1294838fd1498Szrj
1294938fd1498Szrj /* We don't know sign of `t', so be conservative and return false. */
1295038fd1498Szrj return false;
1295138fd1498Szrj }
1295238fd1498Szrj
1295338fd1498Szrj /* Return true if T is known to be non-negative. If the return
1295438fd1498Szrj value is based on the assumption that signed overflow is undefined,
1295538fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1295638fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1295738fd1498Szrj
1295838fd1498Szrj bool
tree_single_nonnegative_warnv_p(tree t,bool * strict_overflow_p,int depth)1295938fd1498Szrj tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
1296038fd1498Szrj {
1296138fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (t)))
1296238fd1498Szrj return true;
1296338fd1498Szrj
1296438fd1498Szrj switch (TREE_CODE (t))
1296538fd1498Szrj {
1296638fd1498Szrj case INTEGER_CST:
1296738fd1498Szrj return tree_int_cst_sgn (t) >= 0;
1296838fd1498Szrj
1296938fd1498Szrj case REAL_CST:
1297038fd1498Szrj return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
1297138fd1498Szrj
1297238fd1498Szrj case FIXED_CST:
1297338fd1498Szrj return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
1297438fd1498Szrj
1297538fd1498Szrj case COND_EXPR:
1297638fd1498Szrj return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
1297738fd1498Szrj
1297838fd1498Szrj case SSA_NAME:
1297938fd1498Szrj /* Limit the depth of recursion to avoid quadratic behavior.
1298038fd1498Szrj This is expected to catch almost all occurrences in practice.
1298138fd1498Szrj If this code misses important cases that unbounded recursion
1298238fd1498Szrj would not, passes that need this information could be revised
1298338fd1498Szrj to provide it through dataflow propagation. */
1298438fd1498Szrj return (!name_registered_for_update_p (t)
1298538fd1498Szrj && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
1298638fd1498Szrj && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
1298738fd1498Szrj strict_overflow_p, depth));
1298838fd1498Szrj
1298938fd1498Szrj default:
1299038fd1498Szrj return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
1299138fd1498Szrj }
1299238fd1498Szrj }
1299338fd1498Szrj
1299438fd1498Szrj /* Return true if T is known to be non-negative. If the return
1299538fd1498Szrj value is based on the assumption that signed overflow is undefined,
1299638fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1299738fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1299838fd1498Szrj
1299938fd1498Szrj bool
tree_call_nonnegative_warnv_p(tree type,combined_fn fn,tree arg0,tree arg1,bool * strict_overflow_p,int depth)1300038fd1498Szrj tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
1300138fd1498Szrj bool *strict_overflow_p, int depth)
1300238fd1498Szrj {
1300338fd1498Szrj switch (fn)
1300438fd1498Szrj {
1300538fd1498Szrj CASE_CFN_ACOS:
1300638fd1498Szrj CASE_CFN_ACOSH:
1300738fd1498Szrj CASE_CFN_CABS:
1300838fd1498Szrj CASE_CFN_COSH:
1300938fd1498Szrj CASE_CFN_ERFC:
1301038fd1498Szrj CASE_CFN_EXP:
1301138fd1498Szrj CASE_CFN_EXP10:
1301238fd1498Szrj CASE_CFN_EXP2:
1301338fd1498Szrj CASE_CFN_FABS:
1301438fd1498Szrj CASE_CFN_FDIM:
1301538fd1498Szrj CASE_CFN_HYPOT:
1301638fd1498Szrj CASE_CFN_POW10:
1301738fd1498Szrj CASE_CFN_FFS:
1301838fd1498Szrj CASE_CFN_PARITY:
1301938fd1498Szrj CASE_CFN_POPCOUNT:
1302038fd1498Szrj CASE_CFN_CLZ:
1302138fd1498Szrj CASE_CFN_CLRSB:
1302238fd1498Szrj case CFN_BUILT_IN_BSWAP32:
1302338fd1498Szrj case CFN_BUILT_IN_BSWAP64:
1302438fd1498Szrj /* Always true. */
1302538fd1498Szrj return true;
1302638fd1498Szrj
1302738fd1498Szrj CASE_CFN_SQRT:
1302838fd1498Szrj CASE_CFN_SQRT_FN:
1302938fd1498Szrj /* sqrt(-0.0) is -0.0. */
1303038fd1498Szrj if (!HONOR_SIGNED_ZEROS (element_mode (type)))
1303138fd1498Szrj return true;
1303238fd1498Szrj return RECURSE (arg0);
1303338fd1498Szrj
1303438fd1498Szrj CASE_CFN_ASINH:
1303538fd1498Szrj CASE_CFN_ATAN:
1303638fd1498Szrj CASE_CFN_ATANH:
1303738fd1498Szrj CASE_CFN_CBRT:
1303838fd1498Szrj CASE_CFN_CEIL:
1303938fd1498Szrj CASE_CFN_CEIL_FN:
1304038fd1498Szrj CASE_CFN_ERF:
1304138fd1498Szrj CASE_CFN_EXPM1:
1304238fd1498Szrj CASE_CFN_FLOOR:
1304338fd1498Szrj CASE_CFN_FLOOR_FN:
1304438fd1498Szrj CASE_CFN_FMOD:
1304538fd1498Szrj CASE_CFN_FREXP:
1304638fd1498Szrj CASE_CFN_ICEIL:
1304738fd1498Szrj CASE_CFN_IFLOOR:
1304838fd1498Szrj CASE_CFN_IRINT:
1304938fd1498Szrj CASE_CFN_IROUND:
1305038fd1498Szrj CASE_CFN_LCEIL:
1305138fd1498Szrj CASE_CFN_LDEXP:
1305238fd1498Szrj CASE_CFN_LFLOOR:
1305338fd1498Szrj CASE_CFN_LLCEIL:
1305438fd1498Szrj CASE_CFN_LLFLOOR:
1305538fd1498Szrj CASE_CFN_LLRINT:
1305638fd1498Szrj CASE_CFN_LLROUND:
1305738fd1498Szrj CASE_CFN_LRINT:
1305838fd1498Szrj CASE_CFN_LROUND:
1305938fd1498Szrj CASE_CFN_MODF:
1306038fd1498Szrj CASE_CFN_NEARBYINT:
1306138fd1498Szrj CASE_CFN_NEARBYINT_FN:
1306238fd1498Szrj CASE_CFN_RINT:
1306338fd1498Szrj CASE_CFN_RINT_FN:
1306438fd1498Szrj CASE_CFN_ROUND:
1306538fd1498Szrj CASE_CFN_ROUND_FN:
1306638fd1498Szrj CASE_CFN_SCALB:
1306738fd1498Szrj CASE_CFN_SCALBLN:
1306838fd1498Szrj CASE_CFN_SCALBN:
1306938fd1498Szrj CASE_CFN_SIGNBIT:
1307038fd1498Szrj CASE_CFN_SIGNIFICAND:
1307138fd1498Szrj CASE_CFN_SINH:
1307238fd1498Szrj CASE_CFN_TANH:
1307338fd1498Szrj CASE_CFN_TRUNC:
1307438fd1498Szrj CASE_CFN_TRUNC_FN:
1307538fd1498Szrj /* True if the 1st argument is nonnegative. */
1307638fd1498Szrj return RECURSE (arg0);
1307738fd1498Szrj
1307838fd1498Szrj CASE_CFN_FMAX:
1307938fd1498Szrj CASE_CFN_FMAX_FN:
1308038fd1498Szrj /* True if the 1st OR 2nd arguments are nonnegative. */
1308138fd1498Szrj return RECURSE (arg0) || RECURSE (arg1);
1308238fd1498Szrj
1308338fd1498Szrj CASE_CFN_FMIN:
1308438fd1498Szrj CASE_CFN_FMIN_FN:
1308538fd1498Szrj /* True if the 1st AND 2nd arguments are nonnegative. */
1308638fd1498Szrj return RECURSE (arg0) && RECURSE (arg1);
1308738fd1498Szrj
1308838fd1498Szrj CASE_CFN_COPYSIGN:
1308938fd1498Szrj CASE_CFN_COPYSIGN_FN:
1309038fd1498Szrj /* True if the 2nd argument is nonnegative. */
1309138fd1498Szrj return RECURSE (arg1);
1309238fd1498Szrj
1309338fd1498Szrj CASE_CFN_POWI:
1309438fd1498Szrj /* True if the 1st argument is nonnegative or the second
1309538fd1498Szrj argument is an even integer. */
1309638fd1498Szrj if (TREE_CODE (arg1) == INTEGER_CST
1309738fd1498Szrj && (TREE_INT_CST_LOW (arg1) & 1) == 0)
1309838fd1498Szrj return true;
1309938fd1498Szrj return RECURSE (arg0);
1310038fd1498Szrj
1310138fd1498Szrj CASE_CFN_POW:
1310238fd1498Szrj /* True if the 1st argument is nonnegative or the second
1310338fd1498Szrj argument is an even integer valued real. */
1310438fd1498Szrj if (TREE_CODE (arg1) == REAL_CST)
1310538fd1498Szrj {
1310638fd1498Szrj REAL_VALUE_TYPE c;
1310738fd1498Szrj HOST_WIDE_INT n;
1310838fd1498Szrj
1310938fd1498Szrj c = TREE_REAL_CST (arg1);
1311038fd1498Szrj n = real_to_integer (&c);
1311138fd1498Szrj if ((n & 1) == 0)
1311238fd1498Szrj {
1311338fd1498Szrj REAL_VALUE_TYPE cint;
1311438fd1498Szrj real_from_integer (&cint, VOIDmode, n, SIGNED);
1311538fd1498Szrj if (real_identical (&c, &cint))
1311638fd1498Szrj return true;
1311738fd1498Szrj }
1311838fd1498Szrj }
1311938fd1498Szrj return RECURSE (arg0);
1312038fd1498Szrj
1312138fd1498Szrj default:
1312238fd1498Szrj break;
1312338fd1498Szrj }
1312438fd1498Szrj return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
1312538fd1498Szrj }
1312638fd1498Szrj
1312738fd1498Szrj /* Return true if T is known to be non-negative. If the return
1312838fd1498Szrj value is based on the assumption that signed overflow is undefined,
1312938fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1313038fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1313138fd1498Szrj
1313238fd1498Szrj static bool
tree_invalid_nonnegative_warnv_p(tree t,bool * strict_overflow_p,int depth)1313338fd1498Szrj tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
1313438fd1498Szrj {
1313538fd1498Szrj enum tree_code code = TREE_CODE (t);
1313638fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (t)))
1313738fd1498Szrj return true;
1313838fd1498Szrj
1313938fd1498Szrj switch (code)
1314038fd1498Szrj {
1314138fd1498Szrj case TARGET_EXPR:
1314238fd1498Szrj {
1314338fd1498Szrj tree temp = TARGET_EXPR_SLOT (t);
1314438fd1498Szrj t = TARGET_EXPR_INITIAL (t);
1314538fd1498Szrj
1314638fd1498Szrj /* If the initializer is non-void, then it's a normal expression
1314738fd1498Szrj that will be assigned to the slot. */
1314838fd1498Szrj if (!VOID_TYPE_P (t))
1314938fd1498Szrj return RECURSE (t);
1315038fd1498Szrj
1315138fd1498Szrj /* Otherwise, the initializer sets the slot in some way. One common
1315238fd1498Szrj way is an assignment statement at the end of the initializer. */
1315338fd1498Szrj while (1)
1315438fd1498Szrj {
1315538fd1498Szrj if (TREE_CODE (t) == BIND_EXPR)
1315638fd1498Szrj t = expr_last (BIND_EXPR_BODY (t));
1315738fd1498Szrj else if (TREE_CODE (t) == TRY_FINALLY_EXPR
1315838fd1498Szrj || TREE_CODE (t) == TRY_CATCH_EXPR)
1315938fd1498Szrj t = expr_last (TREE_OPERAND (t, 0));
1316038fd1498Szrj else if (TREE_CODE (t) == STATEMENT_LIST)
1316138fd1498Szrj t = expr_last (t);
1316238fd1498Szrj else
1316338fd1498Szrj break;
1316438fd1498Szrj }
1316538fd1498Szrj if (TREE_CODE (t) == MODIFY_EXPR
1316638fd1498Szrj && TREE_OPERAND (t, 0) == temp)
1316738fd1498Szrj return RECURSE (TREE_OPERAND (t, 1));
1316838fd1498Szrj
1316938fd1498Szrj return false;
1317038fd1498Szrj }
1317138fd1498Szrj
1317238fd1498Szrj case CALL_EXPR:
1317338fd1498Szrj {
1317438fd1498Szrj tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
1317538fd1498Szrj tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
1317638fd1498Szrj
1317738fd1498Szrj return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
1317838fd1498Szrj get_call_combined_fn (t),
1317938fd1498Szrj arg0,
1318038fd1498Szrj arg1,
1318138fd1498Szrj strict_overflow_p, depth);
1318238fd1498Szrj }
1318338fd1498Szrj case COMPOUND_EXPR:
1318438fd1498Szrj case MODIFY_EXPR:
1318538fd1498Szrj return RECURSE (TREE_OPERAND (t, 1));
1318638fd1498Szrj
1318738fd1498Szrj case BIND_EXPR:
1318838fd1498Szrj return RECURSE (expr_last (TREE_OPERAND (t, 1)));
1318938fd1498Szrj
1319038fd1498Szrj case SAVE_EXPR:
1319138fd1498Szrj return RECURSE (TREE_OPERAND (t, 0));
1319238fd1498Szrj
1319338fd1498Szrj default:
1319438fd1498Szrj return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
1319538fd1498Szrj }
1319638fd1498Szrj }
1319738fd1498Szrj
1319838fd1498Szrj #undef RECURSE
1319938fd1498Szrj #undef tree_expr_nonnegative_warnv_p
1320038fd1498Szrj
1320138fd1498Szrj /* Return true if T is known to be non-negative. If the return
1320238fd1498Szrj value is based on the assumption that signed overflow is undefined,
1320338fd1498Szrj set *STRICT_OVERFLOW_P to true; otherwise, don't change
1320438fd1498Szrj *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
1320538fd1498Szrj
1320638fd1498Szrj bool
tree_expr_nonnegative_warnv_p(tree t,bool * strict_overflow_p,int depth)1320738fd1498Szrj tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
1320838fd1498Szrj {
1320938fd1498Szrj enum tree_code code;
1321038fd1498Szrj if (t == error_mark_node)
1321138fd1498Szrj return false;
1321238fd1498Szrj
1321338fd1498Szrj code = TREE_CODE (t);
1321438fd1498Szrj switch (TREE_CODE_CLASS (code))
1321538fd1498Szrj {
1321638fd1498Szrj case tcc_binary:
1321738fd1498Szrj case tcc_comparison:
1321838fd1498Szrj return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
1321938fd1498Szrj TREE_TYPE (t),
1322038fd1498Szrj TREE_OPERAND (t, 0),
1322138fd1498Szrj TREE_OPERAND (t, 1),
1322238fd1498Szrj strict_overflow_p, depth);
1322338fd1498Szrj
1322438fd1498Szrj case tcc_unary:
1322538fd1498Szrj return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
1322638fd1498Szrj TREE_TYPE (t),
1322738fd1498Szrj TREE_OPERAND (t, 0),
1322838fd1498Szrj strict_overflow_p, depth);
1322938fd1498Szrj
1323038fd1498Szrj case tcc_constant:
1323138fd1498Szrj case tcc_declaration:
1323238fd1498Szrj case tcc_reference:
1323338fd1498Szrj return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
1323438fd1498Szrj
1323538fd1498Szrj default:
1323638fd1498Szrj break;
1323738fd1498Szrj }
1323838fd1498Szrj
1323938fd1498Szrj switch (code)
1324038fd1498Szrj {
1324138fd1498Szrj case TRUTH_AND_EXPR:
1324238fd1498Szrj case TRUTH_OR_EXPR:
1324338fd1498Szrj case TRUTH_XOR_EXPR:
1324438fd1498Szrj return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
1324538fd1498Szrj TREE_TYPE (t),
1324638fd1498Szrj TREE_OPERAND (t, 0),
1324738fd1498Szrj TREE_OPERAND (t, 1),
1324838fd1498Szrj strict_overflow_p, depth);
1324938fd1498Szrj case TRUTH_NOT_EXPR:
1325038fd1498Szrj return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
1325138fd1498Szrj TREE_TYPE (t),
1325238fd1498Szrj TREE_OPERAND (t, 0),
1325338fd1498Szrj strict_overflow_p, depth);
1325438fd1498Szrj
1325538fd1498Szrj case COND_EXPR:
1325638fd1498Szrj case CONSTRUCTOR:
1325738fd1498Szrj case OBJ_TYPE_REF:
1325838fd1498Szrj case ASSERT_EXPR:
1325938fd1498Szrj case ADDR_EXPR:
1326038fd1498Szrj case WITH_SIZE_EXPR:
1326138fd1498Szrj case SSA_NAME:
1326238fd1498Szrj return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
1326338fd1498Szrj
1326438fd1498Szrj default:
1326538fd1498Szrj return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
1326638fd1498Szrj }
1326738fd1498Szrj }
1326838fd1498Szrj
1326938fd1498Szrj /* Return true if `t' is known to be non-negative. Handle warnings
1327038fd1498Szrj about undefined signed overflow. */
1327138fd1498Szrj
1327238fd1498Szrj bool
tree_expr_nonnegative_p(tree t)1327338fd1498Szrj tree_expr_nonnegative_p (tree t)
1327438fd1498Szrj {
1327538fd1498Szrj bool ret, strict_overflow_p;
1327638fd1498Szrj
1327738fd1498Szrj strict_overflow_p = false;
1327838fd1498Szrj ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
1327938fd1498Szrj if (strict_overflow_p)
1328038fd1498Szrj fold_overflow_warning (("assuming signed overflow does not occur when "
1328138fd1498Szrj "determining that expression is always "
1328238fd1498Szrj "non-negative"),
1328338fd1498Szrj WARN_STRICT_OVERFLOW_MISC);
1328438fd1498Szrj return ret;
1328538fd1498Szrj }
1328638fd1498Szrj
1328738fd1498Szrj
1328838fd1498Szrj /* Return true when (CODE OP0) is an address and is known to be nonzero.
1328938fd1498Szrj For floating point we further ensure that T is not denormal.
1329038fd1498Szrj Similar logic is present in nonzero_address in rtlanal.h.
1329138fd1498Szrj
1329238fd1498Szrj If the return value is based on the assumption that signed overflow
1329338fd1498Szrj is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
1329438fd1498Szrj change *STRICT_OVERFLOW_P. */
1329538fd1498Szrj
1329638fd1498Szrj bool
tree_unary_nonzero_warnv_p(enum tree_code code,tree type,tree op0,bool * strict_overflow_p)1329738fd1498Szrj tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
1329838fd1498Szrj bool *strict_overflow_p)
1329938fd1498Szrj {
1330038fd1498Szrj switch (code)
1330138fd1498Szrj {
1330238fd1498Szrj case ABS_EXPR:
1330338fd1498Szrj return tree_expr_nonzero_warnv_p (op0,
1330438fd1498Szrj strict_overflow_p);
1330538fd1498Szrj
1330638fd1498Szrj case NOP_EXPR:
1330738fd1498Szrj {
1330838fd1498Szrj tree inner_type = TREE_TYPE (op0);
1330938fd1498Szrj tree outer_type = type;
1331038fd1498Szrj
1331138fd1498Szrj return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
1331238fd1498Szrj && tree_expr_nonzero_warnv_p (op0,
1331338fd1498Szrj strict_overflow_p));
1331438fd1498Szrj }
1331538fd1498Szrj break;
1331638fd1498Szrj
1331738fd1498Szrj case NON_LVALUE_EXPR:
1331838fd1498Szrj return tree_expr_nonzero_warnv_p (op0,
1331938fd1498Szrj strict_overflow_p);
1332038fd1498Szrj
1332138fd1498Szrj default:
1332238fd1498Szrj break;
1332338fd1498Szrj }
1332438fd1498Szrj
1332538fd1498Szrj return false;
1332638fd1498Szrj }
1332738fd1498Szrj
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      /* nonzero * nonzero == nonzero only if the product cannot wrap
	 around to zero, i.e. when signed overflow is undefined; hence
	 the unconditional *STRICT_OVERFLOW_P below.  */
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      /* NOTE(review): this arm propagates the sub strict-overflow flag
	 but then falls through to the final "return false" — the result
	 of the two nonzero queries is never used to return true, even
	 though MIN of two nonzero values is one of them.  Looks like
	 dead code or a missing "return true"; confirm against upstream
	 before changing (false is merely conservative here).  */
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      /* An IOR is nonzero as soon as either operand is.  */
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  /* Conservative answer: we could not prove the value nonzero.  */
  return false;
}
1343538fd1498Szrj
1343638fd1498Szrj /* Return true when T is an address and is known to be nonzero.
1343738fd1498Szrj For floating point we further ensure that T is not denormal.
1343838fd1498Szrj Similar logic is present in nonzero_address in rtlanal.h.
1343938fd1498Szrj
1344038fd1498Szrj If the return value is based on the assumption that signed overflow
1344138fd1498Szrj is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
1344238fd1498Szrj change *STRICT_OVERFLOW_P. */
1344338fd1498Szrj
1344438fd1498Szrj bool
tree_single_nonzero_warnv_p(tree t,bool * strict_overflow_p)1344538fd1498Szrj tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
1344638fd1498Szrj {
1344738fd1498Szrj bool sub_strict_overflow_p;
1344838fd1498Szrj switch (TREE_CODE (t))
1344938fd1498Szrj {
1345038fd1498Szrj case INTEGER_CST:
1345138fd1498Szrj return !integer_zerop (t);
1345238fd1498Szrj
1345338fd1498Szrj case ADDR_EXPR:
1345438fd1498Szrj {
1345538fd1498Szrj tree base = TREE_OPERAND (t, 0);
1345638fd1498Szrj
1345738fd1498Szrj if (!DECL_P (base))
1345838fd1498Szrj base = get_base_address (base);
1345938fd1498Szrj
1346038fd1498Szrj if (base && TREE_CODE (base) == TARGET_EXPR)
1346138fd1498Szrj base = TARGET_EXPR_SLOT (base);
1346238fd1498Szrj
1346338fd1498Szrj if (!base)
1346438fd1498Szrj return false;
1346538fd1498Szrj
1346638fd1498Szrj /* For objects in symbol table check if we know they are non-zero.
1346738fd1498Szrj Don't do anything for variables and functions before symtab is built;
1346838fd1498Szrj it is quite possible that they will be declared weak later. */
1346938fd1498Szrj int nonzero_addr = maybe_nonzero_address (base);
1347038fd1498Szrj if (nonzero_addr >= 0)
1347138fd1498Szrj return nonzero_addr;
1347238fd1498Szrj
1347338fd1498Szrj /* Constants are never weak. */
1347438fd1498Szrj if (CONSTANT_CLASS_P (base))
1347538fd1498Szrj return true;
1347638fd1498Szrj
1347738fd1498Szrj return false;
1347838fd1498Szrj }
1347938fd1498Szrj
1348038fd1498Szrj case COND_EXPR:
1348138fd1498Szrj sub_strict_overflow_p = false;
1348238fd1498Szrj if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
1348338fd1498Szrj &sub_strict_overflow_p)
1348438fd1498Szrj && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
1348538fd1498Szrj &sub_strict_overflow_p))
1348638fd1498Szrj {
1348738fd1498Szrj if (sub_strict_overflow_p)
1348838fd1498Szrj *strict_overflow_p = true;
1348938fd1498Szrj return true;
1349038fd1498Szrj }
1349138fd1498Szrj break;
1349238fd1498Szrj
1349338fd1498Szrj case SSA_NAME:
1349438fd1498Szrj if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
1349538fd1498Szrj break;
1349638fd1498Szrj return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
1349738fd1498Szrj
1349838fd1498Szrj default:
1349938fd1498Szrj break;
1350038fd1498Szrj }
1350138fd1498Szrj return false;
1350238fd1498Szrj }
1350338fd1498Szrj
/* Poison direct recursive calls to integer_valued_real_p within the
   integer_valued_real_*_p helpers below; RECURSE must be used instead,
   so every nested query increments DEPTH and recursion stays bounded.  */
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))
1350938fd1498Szrj
1351038fd1498Szrj /* Return true if the floating point result of (CODE OP0) has an
1351138fd1498Szrj integer value. We also allow +Inf, -Inf and NaN to be considered
1351238fd1498Szrj integer values. Return false for signaling NaN.
1351338fd1498Szrj
1351438fd1498Szrj DEPTH is the current nesting depth of the query. */
1351538fd1498Szrj
1351638fd1498Szrj bool
integer_valued_real_unary_p(tree_code code,tree op0,int depth)1351738fd1498Szrj integer_valued_real_unary_p (tree_code code, tree op0, int depth)
1351838fd1498Szrj {
1351938fd1498Szrj switch (code)
1352038fd1498Szrj {
1352138fd1498Szrj case FLOAT_EXPR:
1352238fd1498Szrj return true;
1352338fd1498Szrj
1352438fd1498Szrj case ABS_EXPR:
1352538fd1498Szrj return RECURSE (op0);
1352638fd1498Szrj
1352738fd1498Szrj CASE_CONVERT:
1352838fd1498Szrj {
1352938fd1498Szrj tree type = TREE_TYPE (op0);
1353038fd1498Szrj if (TREE_CODE (type) == INTEGER_TYPE)
1353138fd1498Szrj return true;
1353238fd1498Szrj if (TREE_CODE (type) == REAL_TYPE)
1353338fd1498Szrj return RECURSE (op0);
1353438fd1498Szrj break;
1353538fd1498Szrj }
1353638fd1498Szrj
1353738fd1498Szrj default:
1353838fd1498Szrj break;
1353938fd1498Szrj }
1354038fd1498Szrj return false;
1354138fd1498Szrj }
1354238fd1498Szrj
1354338fd1498Szrj /* Return true if the floating point result of (CODE OP0 OP1) has an
1354438fd1498Szrj integer value. We also allow +Inf, -Inf and NaN to be considered
1354538fd1498Szrj integer values. Return false for signaling NaN.
1354638fd1498Szrj
1354738fd1498Szrj DEPTH is the current nesting depth of the query. */
1354838fd1498Szrj
1354938fd1498Szrj bool
integer_valued_real_binary_p(tree_code code,tree op0,tree op1,int depth)1355038fd1498Szrj integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
1355138fd1498Szrj {
1355238fd1498Szrj switch (code)
1355338fd1498Szrj {
1355438fd1498Szrj case PLUS_EXPR:
1355538fd1498Szrj case MINUS_EXPR:
1355638fd1498Szrj case MULT_EXPR:
1355738fd1498Szrj case MIN_EXPR:
1355838fd1498Szrj case MAX_EXPR:
1355938fd1498Szrj return RECURSE (op0) && RECURSE (op1);
1356038fd1498Szrj
1356138fd1498Szrj default:
1356238fd1498Szrj break;
1356338fd1498Szrj }
1356438fd1498Szrj return false;
1356538fd1498Szrj }
1356638fd1498Szrj
/* Return true if the floating point result of calling FNDECL with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.  If FNDECL
   takes fewer than 2 arguments, the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
{
  switch (fn)
    {
    /* Rounding builtins produce an integral value regardless of input.  */
    CASE_CFN_CEIL:
    CASE_CFN_CEIL_FN:
    CASE_CFN_FLOOR:
    CASE_CFN_FLOOR_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUND_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    /* fmin/fmax return one of their arguments, so the result is
       integral when both arguments are.  */
    CASE_CFN_FMIN:
    CASE_CFN_FMIN_FN:
    CASE_CFN_FMAX:
    CASE_CFN_FMAX_FN:
      return RECURSE (arg0) && RECURSE (arg1);

    default:
      break;
    }
  return false;
}
1360438fd1498Szrj
1360538fd1498Szrj /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
1360638fd1498Szrj has an integer value. We also allow +Inf, -Inf and NaN to be
1360738fd1498Szrj considered integer values. Return false for signaling NaN.
1360838fd1498Szrj
1360938fd1498Szrj DEPTH is the current nesting depth of the query. */
1361038fd1498Szrj
1361138fd1498Szrj bool
integer_valued_real_single_p(tree t,int depth)1361238fd1498Szrj integer_valued_real_single_p (tree t, int depth)
1361338fd1498Szrj {
1361438fd1498Szrj switch (TREE_CODE (t))
1361538fd1498Szrj {
1361638fd1498Szrj case REAL_CST:
1361738fd1498Szrj return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
1361838fd1498Szrj
1361938fd1498Szrj case COND_EXPR:
1362038fd1498Szrj return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
1362138fd1498Szrj
1362238fd1498Szrj case SSA_NAME:
1362338fd1498Szrj /* Limit the depth of recursion to avoid quadratic behavior.
1362438fd1498Szrj This is expected to catch almost all occurrences in practice.
1362538fd1498Szrj If this code misses important cases that unbounded recursion
1362638fd1498Szrj would not, passes that need this information could be revised
1362738fd1498Szrj to provide it through dataflow propagation. */
1362838fd1498Szrj return (!name_registered_for_update_p (t)
1362938fd1498Szrj && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
1363038fd1498Szrj && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
1363138fd1498Szrj depth));
1363238fd1498Szrj
1363338fd1498Szrj default:
1363438fd1498Szrj break;
1363538fd1498Szrj }
1363638fd1498Szrj return false;
1363738fd1498Szrj }
1363838fd1498Szrj
1363938fd1498Szrj /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
1364038fd1498Szrj has an integer value. We also allow +Inf, -Inf and NaN to be
1364138fd1498Szrj considered integer values. Return false for signaling NaN.
1364238fd1498Szrj
1364338fd1498Szrj DEPTH is the current nesting depth of the query. */
1364438fd1498Szrj
1364538fd1498Szrj static bool
integer_valued_real_invalid_p(tree t,int depth)1364638fd1498Szrj integer_valued_real_invalid_p (tree t, int depth)
1364738fd1498Szrj {
1364838fd1498Szrj switch (TREE_CODE (t))
1364938fd1498Szrj {
1365038fd1498Szrj case COMPOUND_EXPR:
1365138fd1498Szrj case MODIFY_EXPR:
1365238fd1498Szrj case BIND_EXPR:
1365338fd1498Szrj return RECURSE (TREE_OPERAND (t, 1));
1365438fd1498Szrj
1365538fd1498Szrj case SAVE_EXPR:
1365638fd1498Szrj return RECURSE (TREE_OPERAND (t, 0));
1365738fd1498Szrj
1365838fd1498Szrj default:
1365938fd1498Szrj break;
1366038fd1498Szrj }
1366138fd1498Szrj return false;
1366238fd1498Szrj }
1366338fd1498Szrj
/* The integer-valued-real helpers are done; drop RECURSE and un-poison
   integer_valued_real_p so the real definition below can be written.  */
#undef RECURSE
#undef integer_valued_real_p
1366638fd1498Szrj
1366738fd1498Szrj /* Return true if the floating point expression T has an integer value.
1366838fd1498Szrj We also allow +Inf, -Inf and NaN to be considered integer values.
1366938fd1498Szrj Return false for signaling NaN.
1367038fd1498Szrj
1367138fd1498Szrj DEPTH is the current nesting depth of the query. */
1367238fd1498Szrj
1367338fd1498Szrj bool
integer_valued_real_p(tree t,int depth)1367438fd1498Szrj integer_valued_real_p (tree t, int depth)
1367538fd1498Szrj {
1367638fd1498Szrj if (t == error_mark_node)
1367738fd1498Szrj return false;
1367838fd1498Szrj
1367938fd1498Szrj tree_code code = TREE_CODE (t);
1368038fd1498Szrj switch (TREE_CODE_CLASS (code))
1368138fd1498Szrj {
1368238fd1498Szrj case tcc_binary:
1368338fd1498Szrj case tcc_comparison:
1368438fd1498Szrj return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
1368538fd1498Szrj TREE_OPERAND (t, 1), depth);
1368638fd1498Szrj
1368738fd1498Szrj case tcc_unary:
1368838fd1498Szrj return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
1368938fd1498Szrj
1369038fd1498Szrj case tcc_constant:
1369138fd1498Szrj case tcc_declaration:
1369238fd1498Szrj case tcc_reference:
1369338fd1498Szrj return integer_valued_real_single_p (t, depth);
1369438fd1498Szrj
1369538fd1498Szrj default:
1369638fd1498Szrj break;
1369738fd1498Szrj }
1369838fd1498Szrj
1369938fd1498Szrj switch (code)
1370038fd1498Szrj {
1370138fd1498Szrj case COND_EXPR:
1370238fd1498Szrj case SSA_NAME:
1370338fd1498Szrj return integer_valued_real_single_p (t, depth);
1370438fd1498Szrj
1370538fd1498Szrj case CALL_EXPR:
1370638fd1498Szrj {
1370738fd1498Szrj tree arg0 = (call_expr_nargs (t) > 0
1370838fd1498Szrj ? CALL_EXPR_ARG (t, 0)
1370938fd1498Szrj : NULL_TREE);
1371038fd1498Szrj tree arg1 = (call_expr_nargs (t) > 1
1371138fd1498Szrj ? CALL_EXPR_ARG (t, 1)
1371238fd1498Szrj : NULL_TREE);
1371338fd1498Szrj return integer_valued_real_call_p (get_call_combined_fn (t),
1371438fd1498Szrj arg0, arg1, depth);
1371538fd1498Szrj }
1371638fd1498Szrj
1371738fd1498Szrj default:
1371838fd1498Szrj return integer_valued_real_invalid_p (t, depth);
1371938fd1498Szrj }
1372038fd1498Szrj }
1372138fd1498Szrj
1372238fd1498Szrj /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
1372338fd1498Szrj attempt to fold the expression to a constant without modifying TYPE,
1372438fd1498Szrj OP0 or OP1.
1372538fd1498Szrj
1372638fd1498Szrj If the expression could be simplified to a constant, then return
1372738fd1498Szrj the constant. If the expression would not be simplified to a
1372838fd1498Szrj constant, then return NULL_TREE. */
1372938fd1498Szrj
1373038fd1498Szrj tree
fold_binary_to_constant(enum tree_code code,tree type,tree op0,tree op1)1373138fd1498Szrj fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
1373238fd1498Szrj {
1373338fd1498Szrj tree tem = fold_binary (code, type, op0, op1);
1373438fd1498Szrj return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
1373538fd1498Szrj }
1373638fd1498Szrj
1373738fd1498Szrj /* Given the components of a unary expression CODE, TYPE and OP0,
1373838fd1498Szrj attempt to fold the expression to a constant without modifying
1373938fd1498Szrj TYPE or OP0.
1374038fd1498Szrj
1374138fd1498Szrj If the expression could be simplified to a constant, then return
1374238fd1498Szrj the constant. If the expression would not be simplified to a
1374338fd1498Szrj constant, then return NULL_TREE. */
1374438fd1498Szrj
1374538fd1498Szrj tree
fold_unary_to_constant(enum tree_code code,tree type,tree op0)1374638fd1498Szrj fold_unary_to_constant (enum tree_code code, tree type, tree op0)
1374738fd1498Szrj {
1374838fd1498Szrj tree tem = fold_unary (code, type, op0);
1374938fd1498Szrj return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
1375038fd1498Szrj }
1375138fd1498Szrj
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  /* Only *p and a[i] forms with an integer element type qualify.  */
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	/* For *p, recover both the STRING_CST and the offset INDEX.  */
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      /* Fold only when the string is a real STRING_CST, the index is a
	 constant inside its bounds, and elements are single-byte integers
	 of the same mode as the access type.  */
      scalar_int_mode char_mode;
      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
			  &char_mode)
	  && GET_MODE_SIZE (char_mode) == 1)
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
1380438fd1498Szrj
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      /* Integer (and poly-int) constants: negate in wide-int arithmetic,
	 marking overflow for signed types (e.g. -INT_MIN) and carrying
	 any overflow flag already on ARG0.  */
      if (poly_int_tree_p (arg0))
	{
	  bool overflow;
	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
	  t = force_fit_type (type, res, 1,
			      (overflow && ! TYPE_UNSIGNED (type))
			      || TREE_OVERFLOW (arg0));
	  break;
	}

      /* No other constant kinds should reach here.  */
      gcc_unreachable ();
    }

  return t;
}
1385038fd1498Szrj
1385138fd1498Szrj /* Return the tree for abs (ARG0) when ARG0 is known to be either
1385238fd1498Szrj an integer constant or real constant.
1385338fd1498Szrj
1385438fd1498Szrj TYPE is the type of the result. */
1385538fd1498Szrj
1385638fd1498Szrj tree
fold_abs_const(tree arg0,tree type)1385738fd1498Szrj fold_abs_const (tree arg0, tree type)
1385838fd1498Szrj {
1385938fd1498Szrj tree t = NULL_TREE;
1386038fd1498Szrj
1386138fd1498Szrj switch (TREE_CODE (arg0))
1386238fd1498Szrj {
1386338fd1498Szrj case INTEGER_CST:
1386438fd1498Szrj {
1386538fd1498Szrj /* If the value is unsigned or non-negative, then the absolute value
1386638fd1498Szrj is the same as the ordinary value. */
1386738fd1498Szrj if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
1386838fd1498Szrj t = arg0;
1386938fd1498Szrj
1387038fd1498Szrj /* If the value is negative, then the absolute value is
1387138fd1498Szrj its negation. */
1387238fd1498Szrj else
1387338fd1498Szrj {
1387438fd1498Szrj bool overflow;
1387538fd1498Szrj wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
1387638fd1498Szrj t = force_fit_type (type, val, -1,
1387738fd1498Szrj overflow | TREE_OVERFLOW (arg0));
1387838fd1498Szrj }
1387938fd1498Szrj }
1388038fd1498Szrj break;
1388138fd1498Szrj
1388238fd1498Szrj case REAL_CST:
1388338fd1498Szrj if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
1388438fd1498Szrj t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
1388538fd1498Szrj else
1388638fd1498Szrj t = arg0;
1388738fd1498Szrj break;
1388838fd1498Szrj
1388938fd1498Szrj default:
1389038fd1498Szrj gcc_unreachable ();
1389138fd1498Szrj }
1389238fd1498Szrj
1389338fd1498Szrj return t;
1389438fd1498Szrj }
1389538fd1498Szrj
1389638fd1498Szrj /* Return the tree for not (ARG0) when ARG0 is known to be an integer
1389738fd1498Szrj constant. TYPE is the type of the result. */
1389838fd1498Szrj
1389938fd1498Szrj static tree
fold_not_const(const_tree arg0,tree type)1390038fd1498Szrj fold_not_const (const_tree arg0, tree type)
1390138fd1498Szrj {
1390238fd1498Szrj gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
1390338fd1498Szrj
1390438fd1498Szrj return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
1390538fd1498Szrj }
1390638fd1498Szrj
1390738fd1498Szrj /* Given CODE, a relational operator, the target type, TYPE and two
1390838fd1498Szrj constant operands OP0 and OP1, return the result of the
1390938fd1498Szrj relational operation. If the result is not a compile time
1391038fd1498Szrj constant, then return NULL_TREE. */
1391138fd1498Szrj
1391238fd1498Szrj static tree
fold_relational_const(enum tree_code code,tree type,tree op0,tree op1)1391338fd1498Szrj fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
1391438fd1498Szrj {
1391538fd1498Szrj int result, invert;
1391638fd1498Szrj
1391738fd1498Szrj /* From here on, the only cases we handle are when the result is
1391838fd1498Szrj known to be a constant. */
1391938fd1498Szrj
1392038fd1498Szrj if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
1392138fd1498Szrj {
1392238fd1498Szrj const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
1392338fd1498Szrj const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
1392438fd1498Szrj
1392538fd1498Szrj /* Handle the cases where either operand is a NaN. */
1392638fd1498Szrj if (real_isnan (c0) || real_isnan (c1))
1392738fd1498Szrj {
1392838fd1498Szrj switch (code)
1392938fd1498Szrj {
1393038fd1498Szrj case EQ_EXPR:
1393138fd1498Szrj case ORDERED_EXPR:
1393238fd1498Szrj result = 0;
1393338fd1498Szrj break;
1393438fd1498Szrj
1393538fd1498Szrj case NE_EXPR:
1393638fd1498Szrj case UNORDERED_EXPR:
1393738fd1498Szrj case UNLT_EXPR:
1393838fd1498Szrj case UNLE_EXPR:
1393938fd1498Szrj case UNGT_EXPR:
1394038fd1498Szrj case UNGE_EXPR:
1394138fd1498Szrj case UNEQ_EXPR:
1394238fd1498Szrj result = 1;
1394338fd1498Szrj break;
1394438fd1498Szrj
1394538fd1498Szrj case LT_EXPR:
1394638fd1498Szrj case LE_EXPR:
1394738fd1498Szrj case GT_EXPR:
1394838fd1498Szrj case GE_EXPR:
1394938fd1498Szrj case LTGT_EXPR:
1395038fd1498Szrj if (flag_trapping_math)
1395138fd1498Szrj return NULL_TREE;
1395238fd1498Szrj result = 0;
1395338fd1498Szrj break;
1395438fd1498Szrj
1395538fd1498Szrj default:
1395638fd1498Szrj gcc_unreachable ();
1395738fd1498Szrj }
1395838fd1498Szrj
1395938fd1498Szrj return constant_boolean_node (result, type);
1396038fd1498Szrj }
1396138fd1498Szrj
1396238fd1498Szrj return constant_boolean_node (real_compare (code, c0, c1), type);
1396338fd1498Szrj }
1396438fd1498Szrj
1396538fd1498Szrj if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
1396638fd1498Szrj {
1396738fd1498Szrj const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
1396838fd1498Szrj const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
1396938fd1498Szrj return constant_boolean_node (fixed_compare (code, c0, c1), type);
1397038fd1498Szrj }
1397138fd1498Szrj
1397238fd1498Szrj /* Handle equality/inequality of complex constants. */
1397338fd1498Szrj if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
1397438fd1498Szrj {
1397538fd1498Szrj tree rcond = fold_relational_const (code, type,
1397638fd1498Szrj TREE_REALPART (op0),
1397738fd1498Szrj TREE_REALPART (op1));
1397838fd1498Szrj tree icond = fold_relational_const (code, type,
1397938fd1498Szrj TREE_IMAGPART (op0),
1398038fd1498Szrj TREE_IMAGPART (op1));
1398138fd1498Szrj if (code == EQ_EXPR)
1398238fd1498Szrj return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
1398338fd1498Szrj else if (code == NE_EXPR)
1398438fd1498Szrj return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
1398538fd1498Szrj else
1398638fd1498Szrj return NULL_TREE;
1398738fd1498Szrj }
1398838fd1498Szrj
1398938fd1498Szrj if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
1399038fd1498Szrj {
1399138fd1498Szrj if (!VECTOR_TYPE_P (type))
1399238fd1498Szrj {
1399338fd1498Szrj /* Have vector comparison with scalar boolean result. */
1399438fd1498Szrj gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
1399538fd1498Szrj && known_eq (VECTOR_CST_NELTS (op0),
1399638fd1498Szrj VECTOR_CST_NELTS (op1)));
1399738fd1498Szrj unsigned HOST_WIDE_INT nunits;
1399838fd1498Szrj if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
1399938fd1498Szrj return NULL_TREE;
1400038fd1498Szrj for (unsigned i = 0; i < nunits; i++)
1400138fd1498Szrj {
1400238fd1498Szrj tree elem0 = VECTOR_CST_ELT (op0, i);
1400338fd1498Szrj tree elem1 = VECTOR_CST_ELT (op1, i);
1400438fd1498Szrj tree tmp = fold_relational_const (code, type, elem0, elem1);
1400538fd1498Szrj if (tmp == NULL_TREE)
1400638fd1498Szrj return NULL_TREE;
1400738fd1498Szrj if (integer_zerop (tmp))
1400838fd1498Szrj return constant_boolean_node (false, type);
1400938fd1498Szrj }
1401038fd1498Szrj return constant_boolean_node (true, type);
1401138fd1498Szrj }
1401238fd1498Szrj tree_vector_builder elts;
1401338fd1498Szrj if (!elts.new_binary_operation (type, op0, op1, false))
1401438fd1498Szrj return NULL_TREE;
1401538fd1498Szrj unsigned int count = elts.encoded_nelts ();
1401638fd1498Szrj for (unsigned i = 0; i < count; i++)
1401738fd1498Szrj {
1401838fd1498Szrj tree elem_type = TREE_TYPE (type);
1401938fd1498Szrj tree elem0 = VECTOR_CST_ELT (op0, i);
1402038fd1498Szrj tree elem1 = VECTOR_CST_ELT (op1, i);
1402138fd1498Szrj
1402238fd1498Szrj tree tem = fold_relational_const (code, elem_type,
1402338fd1498Szrj elem0, elem1);
1402438fd1498Szrj
1402538fd1498Szrj if (tem == NULL_TREE)
1402638fd1498Szrj return NULL_TREE;
1402738fd1498Szrj
1402838fd1498Szrj elts.quick_push (build_int_cst (elem_type,
1402938fd1498Szrj integer_zerop (tem) ? 0 : -1));
1403038fd1498Szrj }
1403138fd1498Szrj
1403238fd1498Szrj return elts.build ();
1403338fd1498Szrj }
1403438fd1498Szrj
1403538fd1498Szrj /* From here on we only handle LT, LE, GT, GE, EQ and NE.
1403638fd1498Szrj
1403738fd1498Szrj To compute GT, swap the arguments and do LT.
1403838fd1498Szrj To compute GE, do LT and invert the result.
1403938fd1498Szrj To compute LE, swap the arguments, do LT and invert the result.
1404038fd1498Szrj To compute NE, do EQ and invert the result.
1404138fd1498Szrj
1404238fd1498Szrj Therefore, the code below must handle only EQ and LT. */
1404338fd1498Szrj
1404438fd1498Szrj if (code == LE_EXPR || code == GT_EXPR)
1404538fd1498Szrj {
1404638fd1498Szrj std::swap (op0, op1);
1404738fd1498Szrj code = swap_tree_comparison (code);
1404838fd1498Szrj }
1404938fd1498Szrj
1405038fd1498Szrj /* Note that it is safe to invert for real values here because we
1405138fd1498Szrj have already handled the one case that it matters. */
1405238fd1498Szrj
1405338fd1498Szrj invert = 0;
1405438fd1498Szrj if (code == NE_EXPR || code == GE_EXPR)
1405538fd1498Szrj {
1405638fd1498Szrj invert = 1;
1405738fd1498Szrj code = invert_tree_comparison (code, false);
1405838fd1498Szrj }
1405938fd1498Szrj
1406038fd1498Szrj /* Compute a result for LT or EQ if args permit;
1406138fd1498Szrj Otherwise return T. */
1406238fd1498Szrj if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
1406338fd1498Szrj {
1406438fd1498Szrj if (code == EQ_EXPR)
1406538fd1498Szrj result = tree_int_cst_equal (op0, op1);
1406638fd1498Szrj else
1406738fd1498Szrj result = tree_int_cst_lt (op0, op1);
1406838fd1498Szrj }
1406938fd1498Szrj else
1407038fd1498Szrj return NULL_TREE;
1407138fd1498Szrj
1407238fd1498Szrj if (invert)
1407338fd1498Szrj result ^= 1;
1407438fd1498Szrj return constant_boolean_node (result, type);
1407538fd1498Szrj }
1407638fd1498Szrj
1407738fd1498Szrj /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
1407838fd1498Szrj indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
1407938fd1498Szrj itself. */
1408038fd1498Szrj
tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return. If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      /* NOTE(review): this assumes the RETURN_EXPR operand is a MODIFY_EXPR
	 whose operand 1 is the computed value -- confirm callers guarantee
	 that shape before relying on it.  */
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  /* Otherwise wrap EXPR, preserving its source location.  */
  return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}
1410638fd1498Szrj
1410738fd1498Szrj /* Given a pointer value OP0 and a type TYPE, return a simplified version
1410838fd1498Szrj of an indirection through OP0, or NULL_TREE if no simplification is
1410938fd1498Szrj possible. */
1411038fd1498Szrj
tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;
  poly_uint64 const_op01;

  /* Look through value-preserving conversions on the pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only handle genuine pointers; give up on may-alias-all references.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* In GIMPLE the array index must be an INTEGER_CST.  */
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (VECTOR_TYPE_P (optype)
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  /* Handle *(p + CST) forms where the offset is a (poly-)constant.  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (VECTOR_TYPE_P (op00type)
	      && type == TREE_TYPE (op00type)
	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
		 but we want to treat offsets with MSB set as negative.
		 For the code below negative offsets are invalid and
		 TYPE_SIZE of the element is something unsigned, so
		 check whether op01 fits into poly_int64, which implies
		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
		 then just use poly_uint64 because we want to treat the
		 value as unsigned.  */
	      && tree_fits_poly_int64_p (op01))
	    {
	      tree part_width = TYPE_SIZE (type);
	      /* Total vector size in bytes; the offset must fall inside.  */
	      poly_uint64 max_offset
		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
		   * TYPE_VECTOR_SUBPARTS (op00type));
	      if (known_lt (const_op01, max_offset))
		{
		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
		  return fold_build3_loc (loc,
					  BIT_FIELD_REF, type, op00,
					  part_width, index);
		}
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      /* The offset must be exactly one element size.  */
	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
			    const_op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      /* The byte offset must be an exact multiple of the element
		 size for it to be expressible as an array index.  */
	      offset_int off = wi::to_offset (op01);
	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
	      offset_int remainder;
	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
	      if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
		{
		  off = off + wi::to_offset (min_val);
		  op01 = wide_int_to_tree (sizetype, off);
		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
				     NULL_TREE, NULL_TREE);
		}
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      /* As above, a GIMPLE-valid index must be an INTEGER_CST.  */
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  /* No simplification applies.  */
  return NULL_TREE;
}
1426238fd1498Szrj
1426338fd1498Szrj /* Builds an expression for an indirection through T, simplifying some
1426438fd1498Szrj cases. */
1426538fd1498Szrj
1426638fd1498Szrj tree
build_fold_indirect_ref_loc(location_t loc,tree t)1426738fd1498Szrj build_fold_indirect_ref_loc (location_t loc, tree t)
1426838fd1498Szrj {
1426938fd1498Szrj tree type = TREE_TYPE (TREE_TYPE (t));
1427038fd1498Szrj tree sub = fold_indirect_ref_1 (loc, type, t);
1427138fd1498Szrj
1427238fd1498Szrj if (sub)
1427338fd1498Szrj return sub;
1427438fd1498Szrj
1427538fd1498Szrj return build1_loc (loc, INDIRECT_REF, type, t);
1427638fd1498Szrj }
1427738fd1498Szrj
1427838fd1498Szrj /* Given an INDIRECT_REF T, return either T or a simplified version. */
1427938fd1498Szrj
1428038fd1498Szrj tree
fold_indirect_ref_loc(location_t loc,tree t)1428138fd1498Szrj fold_indirect_ref_loc (location_t loc, tree t)
1428238fd1498Szrj {
1428338fd1498Szrj tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
1428438fd1498Szrj
1428538fd1498Szrj if (sub)
1428638fd1498Szrj return sub;
1428738fd1498Szrj else
1428838fd1498Szrj return t;
1428938fd1498Szrj }
1429038fd1498Szrj
1429138fd1498Szrj /* Strip non-trapping, non-side-effecting tree nodes from an expression
1429238fd1498Szrj whose result is ignored. The type of the returned tree need not be
1429338fd1498Szrj the same as the original expression. */
1429438fd1498Szrj
tree
fold_ignored_result (tree t)
{
  /* With no side effects at all, the whole expression can be dropped.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Iteratively strip off sub-expressions that have no side effects,
     keeping only the parts that must actually be evaluated.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	/* The operator itself is pure; descend into the operand.  */
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	/* Keep whichever operand carries the side effects; if both do,
	   the expression must be kept as-is.  */
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    /* (a, b): if b has side effects the pair must stay;
	       otherwise only a matters.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    /* a ? b : c: both arms must be side-effect free before we
	       can reduce to just the condition.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
1434338fd1498Szrj
1434438fd1498Szrj /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
1434538fd1498Szrj
tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  /* Rounding to a multiple of 1 is a no-op.  */
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  /* Already aligned: return the constant unchanged.  */
	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  /* (val + divisor - 1) & -divisor rounds up; if the masked
	     result wrapped to zero, the addition overflowed.  */
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  /* Build (value + (divisor - 1)) & -divisor symbolically.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* General case: ceil-divide then multiply back.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
1440538fd1498Szrj
1440638fd1498Szrj /* Likewise, but round down. */
1440738fd1498Szrj
tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  /* Unlike round_up_loc, DIVISOR is signed here and must be positive.  */
  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      /* value & -divisor clears the low log2(divisor) bits,
	 rounding toward zero.  */
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      /* General case: floor-divide then multiply back.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
1444738fd1498Szrj
1444838fd1498Szrj /* Returns the pointer to the base of the object addressed by EXP and
1444938fd1498Szrj extracts the information about the offset of the access, storing it
1445038fd1498Szrj to PBITPOS and POFFSET. */
1445138fd1498Szrj
static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      /* &obj: let get_inner_reference peel the reference and report the
	 constant bit position plus any variable offset.  */
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* p + off: the first operand is the core, the second the offset.  */
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  /* Constant offset: sign-extend it (sizetype offsets with the
	     MSB set are treated as negative), convert bytes to bits and
	     fold it into *PBITPOS if it fits.  */
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      /* Anything else is its own core with zero offset.  */
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
1449438fd1498Szrj
1449538fd1498Szrj /* Returns true if addresses of E1 and E2 differ by a constant, false
1449638fd1498Szrj otherwise. If they do, E1 - E2 is stored in *DIFF. */
1449738fd1498Szrj
bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  /* Decompose both addresses into core + bit position + variable offset.  */
  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* Both bit positions must be whole bytes and the cores must be the
     same object, otherwise the difference is not a known constant.  */
  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      /* Both have variable offsets; their difference must fold to a
	 constant that fits in a HOST_WIDE_INT.  */
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Add the constant byte-position part of the difference.  */
  *diff += bytepos1 - bytepos2;
  return true;
}
1453838fd1498Szrj
1453938fd1498Szrj /* Return OFF converted to a pointer offset type suitable as offset for
1454038fd1498Szrj POINTER_PLUS_EXPR. Use location LOC for this conversion. */
1454138fd1498Szrj tree
convert_to_ptrofftype_loc(location_t loc,tree off)1454238fd1498Szrj convert_to_ptrofftype_loc (location_t loc, tree off)
1454338fd1498Szrj {
1454438fd1498Szrj return fold_convert_loc (loc, sizetype, off);
1454538fd1498Szrj }
1454638fd1498Szrj
1454738fd1498Szrj /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
1454838fd1498Szrj tree
fold_build_pointer_plus_loc(location_t loc,tree ptr,tree off)1454938fd1498Szrj fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
1455038fd1498Szrj {
1455138fd1498Szrj return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
1455238fd1498Szrj ptr, convert_to_ptrofftype_loc (loc, off));
1455338fd1498Szrj }
1455438fd1498Szrj
1455538fd1498Szrj /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
1455638fd1498Szrj tree
fold_build_pointer_plus_hwi_loc(location_t loc,tree ptr,HOST_WIDE_INT off)1455738fd1498Szrj fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
1455838fd1498Szrj {
1455938fd1498Szrj return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
1456038fd1498Szrj ptr, size_int (off));
1456138fd1498Szrj }
1456238fd1498Szrj
1456338fd1498Szrj /* Return a char pointer for a C string if it is a string constant
1456438fd1498Szrj or sum of string constant and integer constant. We only support
1456538fd1498Szrj string constants properly terminated with '\0' character.
1456638fd1498Szrj If STRLEN is a valid pointer, length (including terminating character)
1456738fd1498Szrj of returned string is stored to the argument. */
1456838fd1498Szrj
1456938fd1498Szrj const char *
c_getstr(tree src,unsigned HOST_WIDE_INT * strlen)1457038fd1498Szrj c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
1457138fd1498Szrj {
1457238fd1498Szrj tree offset_node;
1457338fd1498Szrj
1457438fd1498Szrj if (strlen)
1457538fd1498Szrj *strlen = 0;
1457638fd1498Szrj
1457738fd1498Szrj src = string_constant (src, &offset_node);
1457838fd1498Szrj if (src == 0)
1457938fd1498Szrj return NULL;
1458038fd1498Szrj
1458138fd1498Szrj unsigned HOST_WIDE_INT offset = 0;
1458238fd1498Szrj if (offset_node != NULL_TREE)
1458338fd1498Szrj {
1458438fd1498Szrj if (!tree_fits_uhwi_p (offset_node))
1458538fd1498Szrj return NULL;
1458638fd1498Szrj else
1458738fd1498Szrj offset = tree_to_uhwi (offset_node);
1458838fd1498Szrj }
1458938fd1498Szrj
1459038fd1498Szrj unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
1459138fd1498Szrj const char *string = TREE_STRING_POINTER (src);
1459238fd1498Szrj
1459338fd1498Szrj /* Support only properly null-terminated strings. */
1459438fd1498Szrj if (string_length == 0
1459538fd1498Szrj || string[string_length - 1] != '\0'
1459638fd1498Szrj || offset >= string_length)
1459738fd1498Szrj return NULL;
1459838fd1498Szrj
1459938fd1498Szrj if (strlen)
1460038fd1498Szrj *strlen = string_length - offset;
1460138fd1498Szrj return string + offset;
1460238fd1498Szrj }
1460338fd1498Szrj
1460438fd1498Szrj #if CHECKING_P
1460538fd1498Szrj
1460638fd1498Szrj namespace selftest {
1460738fd1498Szrj
1460838fd1498Szrj /* Helper functions for writing tests of folding trees. */
1460938fd1498Szrj
1461038fd1498Szrj /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
1461138fd1498Szrj
1461238fd1498Szrj static void
assert_binop_folds_to_const(tree lhs,enum tree_code code,tree rhs,tree constant)1461338fd1498Szrj assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
1461438fd1498Szrj tree constant)
1461538fd1498Szrj {
1461638fd1498Szrj ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
1461738fd1498Szrj }
1461838fd1498Szrj
1461938fd1498Szrj /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
1462038fd1498Szrj wrapping WRAPPED_EXPR. */
1462138fd1498Szrj
1462238fd1498Szrj static void
assert_binop_folds_to_nonlvalue(tree lhs,enum tree_code code,tree rhs,tree wrapped_expr)1462338fd1498Szrj assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
1462438fd1498Szrj tree wrapped_expr)
1462538fd1498Szrj {
1462638fd1498Szrj tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
1462738fd1498Szrj ASSERT_NE (wrapped_expr, result);
1462838fd1498Szrj ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
1462938fd1498Szrj ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
1463038fd1498Szrj }
1463138fd1498Szrj
1463238fd1498Szrj /* Verify that various arithmetic binary operations are folded
1463338fd1498Szrj correctly. */
1463438fd1498Szrj
1463538fd1498Szrj static void
test_arithmetic_folding()1463638fd1498Szrj test_arithmetic_folding ()
1463738fd1498Szrj {
1463838fd1498Szrj tree type = integer_type_node;
1463938fd1498Szrj tree x = create_tmp_var_raw (type, "x");
1464038fd1498Szrj tree zero = build_zero_cst (type);
1464138fd1498Szrj tree one = build_int_cst (type, 1);
1464238fd1498Szrj
1464338fd1498Szrj /* Addition. */
1464438fd1498Szrj /* 1 <-- (0 + 1) */
1464538fd1498Szrj assert_binop_folds_to_const (zero, PLUS_EXPR, one,
1464638fd1498Szrj one);
1464738fd1498Szrj assert_binop_folds_to_const (one, PLUS_EXPR, zero,
1464838fd1498Szrj one);
1464938fd1498Szrj
1465038fd1498Szrj /* (nonlvalue)x <-- (x + 0) */
1465138fd1498Szrj assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
1465238fd1498Szrj x);
1465338fd1498Szrj
1465438fd1498Szrj /* Subtraction. */
1465538fd1498Szrj /* 0 <-- (x - x) */
1465638fd1498Szrj assert_binop_folds_to_const (x, MINUS_EXPR, x,
1465738fd1498Szrj zero);
1465838fd1498Szrj assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
1465938fd1498Szrj x);
1466038fd1498Szrj
1466138fd1498Szrj /* Multiplication. */
1466238fd1498Szrj /* 0 <-- (x * 0) */
1466338fd1498Szrj assert_binop_folds_to_const (x, MULT_EXPR, zero,
1466438fd1498Szrj zero);
1466538fd1498Szrj
1466638fd1498Szrj /* (nonlvalue)x <-- (x * 1) */
1466738fd1498Szrj assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
1466838fd1498Szrj x);
1466938fd1498Szrj }
1467038fd1498Szrj
1467138fd1498Szrj /* Verify that various binary operations on vectors are folded
1467238fd1498Szrj correctly. */
1467338fd1498Szrj
1467438fd1498Szrj static void
test_vector_folding()1467538fd1498Szrj test_vector_folding ()
1467638fd1498Szrj {
1467738fd1498Szrj tree inner_type = integer_type_node;
1467838fd1498Szrj tree type = build_vector_type (inner_type, 4);
1467938fd1498Szrj tree zero = build_zero_cst (type);
1468038fd1498Szrj tree one = build_one_cst (type);
1468138fd1498Szrj
1468238fd1498Szrj /* Verify equality tests that return a scalar boolean result. */
1468338fd1498Szrj tree res_type = boolean_type_node;
1468438fd1498Szrj ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
1468538fd1498Szrj ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
1468638fd1498Szrj ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
1468738fd1498Szrj ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
1468838fd1498Szrj }
1468938fd1498Szrj
1469038fd1498Szrj /* Verify folding of VEC_DUPLICATE_EXPRs. */
1469138fd1498Szrj
1469238fd1498Szrj static void
test_vec_duplicate_folding()1469338fd1498Szrj test_vec_duplicate_folding ()
1469438fd1498Szrj {
1469538fd1498Szrj scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
1469638fd1498Szrj machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
1469738fd1498Szrj /* This will be 1 if VEC_MODE isn't a vector mode. */
1469838fd1498Szrj poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
1469938fd1498Szrj
1470038fd1498Szrj tree type = build_vector_type (ssizetype, nunits);
1470138fd1498Szrj tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
1470238fd1498Szrj tree dup5_cst = build_vector_from_val (type, ssize_int (5));
1470338fd1498Szrj ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
1470438fd1498Szrj }
1470538fd1498Szrj
1470638fd1498Szrj /* Run all of the selftests within this file. */
1470738fd1498Szrj
void
fold_const_c_tests ()
{
  /* Invoke each folding selftest in turn.  */
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}
1471538fd1498Szrj
1471638fd1498Szrj } // namespace selftest
1471738fd1498Szrj
1471838fd1498Szrj #endif /* CHECKING_P */
14719