このページを編集する際は、編集に関する方針に従ってください。
概要
引数
- enum tree_code code
- tree type
- tree op0
- tree op1
実装
7023 /* Fold a binary expression of code CODE and type TYPE with operands
7024 OP0 and OP1. Return the folded expression if folding is
7025 successful. Otherwise, return NULL_TREE. */
7026
7027 tree
7028 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7029 {
7030 tree t1 = NULL_TREE;
7031 tree tem;
7032 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7033 enum tree_code_class kind = TREE_CODE_CLASS (code);
7034
7035 /* WINS will be nonzero when the switch is done
7036 if all operands are constant. */
7037 int wins = 1;
7038
7039 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7040 && TREE_CODE_LENGTH (code) == 2);
7041
7042 arg0 = op0;
7043 arg1 = op1;
7044
7045 if (arg0)
7046 {
7047 tree subop;
7048
7049 /* Strip any conversions that don't change the mode. This is
7050 safe for every expression, except for a comparison expression
7051 because its signedness is derived from its operands. So, in
7052 the latter case, only strip conversions that don't change the
7053 signedness.
7054
7055 Note that this is done as an internal manipulation within the
7056 constant folder, in order to find the simplest representation
7057 of the arguments so that their form can be studied. In any
7058 cases, the appropriate type conversions should be put back in
7059 the tree that will get out of the constant folder. */
7060 if (kind == tcc_comparison)
7061 STRIP_SIGN_NOPS (arg0);
7062 else
7063 STRIP_NOPS (arg0);
7064
7065 if (TREE_CODE (arg0) == COMPLEX_CST)
7066 subop = TREE_REALPART (arg0);
7067 else
7068 subop = arg0;
7069
7070 if (TREE_CODE (subop) != INTEGER_CST
7071 && TREE_CODE (subop) != REAL_CST)
7072 /* Note that TREE_CONSTANT isn't enough:
7073 static var addresses are constant but we can't
7074 do arithmetic on them. */
7075 wins = 0;
7076 }
7077
7078 if (arg1)
7079 {
7080 tree subop;
7081
7082 /* Strip any conversions that don't change the mode. This is
7083 safe for every expression, except for a comparison expression
7084 because its signedness is derived from its operands. So, in
7085 the latter case, only strip conversions that don't change the
7086 signedness.
7087
7088 Note that this is done as an internal manipulation within the
7089 constant folder, in order to find the simplest representation
7090 of the arguments so that their form can be studied. In any
7091 cases, the appropriate type conversions should be put back in
7092 the tree that will get out of the constant folder. */
7093 if (kind == tcc_comparison)
7094 STRIP_SIGN_NOPS (arg1);
7095 else
7096 STRIP_NOPS (arg1);
7097
7098 if (TREE_CODE (arg1) == COMPLEX_CST)
7099 subop = TREE_REALPART (arg1);
7100 else
7101 subop = arg1;
7102
7103 if (TREE_CODE (subop) != INTEGER_CST
7104 && TREE_CODE (subop) != REAL_CST)
7105 /* Note that TREE_CONSTANT isn't enough:
7106 static var addresses are constant but we can't
7107 do arithmetic on them. */
7108 wins = 0;
7109 }
7110
7111 /* If this is a commutative operation, and ARG0 is a constant, move it
7112 to ARG1 to reduce the number of tests below. */
可換な演算(arg0とarg1を交換して計算しても結果が変わらない演算)であり、かつ tree_swap_operands_p がオペランドの交換を推奨する場合(例えば arg0 が定数で arg1 がそうでない場合)は、arg0 と arg1 を交換したうえで fold_build2 による畳み込みを試みる。
7113 if (commutative_tree_code (code)
7114 && tree_swap_operands_p (arg0, arg1, true))
7115 return fold_build2 (code, type, op1, op0);
7116
7117 /* Now WINS is set as described above,
7118 ARG0 is the first operand of EXPR,
7119 and ARG1 is the second operand (if it has more than one operand).
7120
7121 First check for cases where an arithmetic operation is applied to a
7122 compound, conditional, or comparison operation. Push the arithmetic
7123 operation inside the compound or conditional to see if any folding
7124 can then be done. Convert comparison to conditional for this purpose.
7125 The also optimizes non-constant cases that used to be done in
7126 expand_expr.
7127
7128 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7129 one of the operands is a comparison and the other is a comparison, a
7130 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7131 code below would make the expression more complex. Change it to a
7132 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7133 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7134
7135 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7136 || code == EQ_EXPR || code == NE_EXPR)
7137 && ((truth_value_p (TREE_CODE (arg0))
7138 && (truth_value_p (TREE_CODE (arg1))
7139 || (TREE_CODE (arg1) == BIT_AND_EXPR
7140 && integer_onep (TREE_OPERAND (arg1, 1)))))
7141 || (truth_value_p (TREE_CODE (arg1))
7142 && (truth_value_p (TREE_CODE (arg0))
7143 || (TREE_CODE (arg0) == BIT_AND_EXPR
7144 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7145 {
7146 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7147 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7148 : TRUTH_XOR_EXPR,
7149 boolean_type_node,
7150 fold_convert (boolean_type_node, arg0),
7151 fold_convert (boolean_type_node, arg1));
7152
7153 if (code == EQ_EXPR)
7154 tem = invert_truthvalue (tem);
7155
7156 return fold_convert (type, tem);
7157 }
7158
tcc_binary, tcc_comparison
7159 if (TREE_CODE_CLASS (code) == tcc_binary
7160 || TREE_CODE_CLASS (code) == tcc_comparison)
7161 {
7162 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7163 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7164 fold_build2 (code, type,
7165 TREE_OPERAND (arg0, 1), op1));
7166 if (TREE_CODE (arg1) == COMPOUND_EXPR
7167 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7168 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7169 fold_build2 (code, type,
7170 op0, TREE_OPERAND (arg1, 1)));
7171
7172 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7173 {
7174 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7175 arg0, arg1,
7176 /*cond_first_p=*/1);
7177 if (tem != NULL_TREE)
7178 return tem;
7179 }
7180
7181 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7182 {
7183 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7184 arg1, arg0,
7185 /*cond_first_p=*/0);
7186 if (tem != NULL_TREE)
7187 return tem;
7188 }
7189 }
7190
7191 switch (code)
7192 {
PLUS_EXPR
7193 case PLUS_EXPR:
7194 /* A + (-B) -> A - B */
7195 if (TREE_CODE (arg1) == NEGATE_EXPR)
7196 return fold_build2 (MINUS_EXPR, type,
7197 fold_convert (type, arg0),
7198 fold_convert (type, TREE_OPERAND (arg1, 0)));
7199 /* (-A) + B -> B - A */
7200 if (TREE_CODE (arg0) == NEGATE_EXPR
7201 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7202 return fold_build2 (MINUS_EXPR, type,
7203 fold_convert (type, arg1),
7204 fold_convert (type, TREE_OPERAND (arg0, 0)));
7205 /* Convert ~A + 1 to -A. */
7206 if (INTEGRAL_TYPE_P (type)
7207 && TREE_CODE (arg0) == BIT_NOT_EXPR
7208 && integer_onep (arg1))
7209 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7210
INTEGER
7211 if (! FLOAT_TYPE_P (type))
7212 {
7213 if (integer_zerop (arg1))
7214 return non_lvalue (fold_convert (type, arg0));
7215
7216 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7217 with a constant, and the two constants have no bits in common,
7218 we should treat this as a BIT_IOR_EXPR since this may produce more
7219 simplifications. */
7220 if (TREE_CODE (arg0) == BIT_AND_EXPR
7221 && TREE_CODE (arg1) == BIT_AND_EXPR
7222 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7223 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7224 && integer_zerop (const_binop (BIT_AND_EXPR,
7225 TREE_OPERAND (arg0, 1),
7226 TREE_OPERAND (arg1, 1), 0)))
7227 {
7228 code = BIT_IOR_EXPR;
7229 goto bit_ior;
7230 }
7231
7232 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7233 (plus (plus (mult) (mult)) (foo)) so that we can
7234 take advantage of the factoring cases below. */
7235 if (((TREE_CODE (arg0) == PLUS_EXPR
7236 || TREE_CODE (arg0) == MINUS_EXPR)
7237 && TREE_CODE (arg1) == MULT_EXPR)
7238 || ((TREE_CODE (arg1) == PLUS_EXPR
7239 || TREE_CODE (arg1) == MINUS_EXPR)
7240 && TREE_CODE (arg0) == MULT_EXPR))
7241 {
7242 tree parg0, parg1, parg, marg;
7243 enum tree_code pcode;
7244
7245 if (TREE_CODE (arg1) == MULT_EXPR)
7246 parg = arg0, marg = arg1;
7247 else
7248 parg = arg1, marg = arg0;
7249 pcode = TREE_CODE (parg);
7250 parg0 = TREE_OPERAND (parg, 0);
7251 parg1 = TREE_OPERAND (parg, 1);
7252 STRIP_NOPS (parg0);
7253 STRIP_NOPS (parg1);
7254
7255 if (TREE_CODE (parg0) == MULT_EXPR
7256 && TREE_CODE (parg1) != MULT_EXPR)
7257 return fold_build2 (pcode, type,
7258 fold_build2 (PLUS_EXPR, type,
7259 fold_convert (type, parg0),
7260 fold_convert (type, marg)),
7261 fold_convert (type, parg1));
7262 if (TREE_CODE (parg0) != MULT_EXPR
7263 && TREE_CODE (parg1) == MULT_EXPR)
7264 return fold_build2 (PLUS_EXPR, type,
7265 fold_convert (type, parg0),
7266 fold_build2 (pcode, type,
7267 fold_convert (type, marg),
7268 fold_convert (type,
7269 parg1)));
7270 }
7271
7272 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7273 {
7274 tree arg00, arg01, arg10, arg11;
7275 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7276
7277 /* (A * C) + (B * C) -> (A+B) * C.
7278 We are most concerned about the case where C is a constant,
7279 but other combinations show up during loop reduction. Since
7280 it is not difficult, try all four possibilities. */
7281
7282 arg00 = TREE_OPERAND (arg0, 0);
7283 arg01 = TREE_OPERAND (arg0, 1);
7284 arg10 = TREE_OPERAND (arg1, 0);
7285 arg11 = TREE_OPERAND (arg1, 1);
7286 same = NULL_TREE;
7287
7288 if (operand_equal_p (arg01, arg11, 0))
7289 same = arg01, alt0 = arg00, alt1 = arg10;
7290 else if (operand_equal_p (arg00, arg10, 0))
7291 same = arg00, alt0 = arg01, alt1 = arg11;
7292 else if (operand_equal_p (arg00, arg11, 0))
7293 same = arg00, alt0 = arg01, alt1 = arg10;
7294 else if (operand_equal_p (arg01, arg10, 0))
7295 same = arg01, alt0 = arg00, alt1 = arg11;
7296
7297 /* No identical multiplicands; see if we can find a common
7298 power-of-two factor in non-power-of-two multiplies. This
7299 can help in multi-dimensional array access. */
7300 else if (TREE_CODE (arg01) == INTEGER_CST
7301 && TREE_CODE (arg11) == INTEGER_CST
7302 && TREE_INT_CST_HIGH (arg01) == 0
7303 && TREE_INT_CST_HIGH (arg11) == 0)
7304 {
7305 HOST_WIDE_INT int01, int11, tmp;
7306 int01 = TREE_INT_CST_LOW (arg01);
7307 int11 = TREE_INT_CST_LOW (arg11);
7308
7309 /* Move min of absolute values to int11. */
7310 if ((int01 >= 0 ? int01 : -int01)
7311 < (int11 >= 0 ? int11 : -int11))
7312 {
7313 tmp = int01, int01 = int11, int11 = tmp;
7314 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7315 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7316 }
7317
7318 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7319 {
7320 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7321 build_int_cst (NULL_TREE,
7322 int01 / int11));
7323 alt1 = arg10;
7324 same = arg11;
7325 }
7326 }
7327
7328 if (same)
7329 return fold_build2 (MULT_EXPR, type,
7330 fold_build2 (PLUS_EXPR, type,
7331 fold_convert (type, alt0),
7332 fold_convert (type, alt1)),
7333 fold_convert (type, same));
7334 }
7335
7336 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7337 of the array. Loop optimizer sometimes produce this type of
7338 expressions. */
7339 if (TREE_CODE (arg0) == ADDR_EXPR)
7340 {
7341 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7342 if (tem)
7343 return fold_convert (type, tem);
7344 }
7345 else if (TREE_CODE (arg1) == ADDR_EXPR)
7346 {
7347 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7348 if (tem)
7349 return fold_convert (type, tem);
7350 }
7351 }
REAL
7352 else
7353 {
7354 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7355 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7356 return non_lvalue (fold_convert (type, arg0));
7357
7358 /* Likewise if the operands are reversed. */
7359 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7360 return non_lvalue (fold_convert (type, arg1));
7361
7362 /* Convert X + -C into X - C. */
7363 if (TREE_CODE (arg1) == REAL_CST
7364 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7365 {
7366 tem = fold_negate_const (arg1, type);
7367 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7368 return fold_build2 (MINUS_EXPR, type,
7369 fold_convert (type, arg0),
7370 fold_convert (type, tem));
7371 }
7372
7373 if (flag_unsafe_math_optimizations
7374 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7375 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7376 && (tem = distribute_real_division (code, type, arg0, arg1)))
7377 return tem;
7378
7379 /* Convert x+x into x*2.0. */
7380 if (operand_equal_p (arg0, arg1, 0)
7381 && SCALAR_FLOAT_TYPE_P (type))
7382 return fold_build2 (MULT_EXPR, type, arg0,
7383 build_real (type, dconst2));
7384
7385 /* Convert x*c+x into x*(c+1). */
7386 if (flag_unsafe_math_optimizations
7387 && TREE_CODE (arg0) == MULT_EXPR
7388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7389 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7391 {
7392 REAL_VALUE_TYPE c;
7393
7394 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7395 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7396 return fold_build2 (MULT_EXPR, type, arg1,
7397 build_real (type, c));
7398 }
7399
7400 /* Convert x+x*c into x*(c+1). */
7401 if (flag_unsafe_math_optimizations
7402 && TREE_CODE (arg1) == MULT_EXPR
7403 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7404 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7405 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7406 {
7407 REAL_VALUE_TYPE c;
7408
7409 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7410 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7411 return fold_build2 (MULT_EXPR, type, arg0,
7412 build_real (type, c));
7413 }
7414
7415 /* Convert x*c1+x*c2 into x*(c1+c2). */
7416 if (flag_unsafe_math_optimizations
7417 && TREE_CODE (arg0) == MULT_EXPR
7418 && TREE_CODE (arg1) == MULT_EXPR
7419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7420 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7421 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7422 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7423 && operand_equal_p (TREE_OPERAND (arg0, 0),
7424 TREE_OPERAND (arg1, 0), 0))
7425 {
7426 REAL_VALUE_TYPE c1, c2;
7427
7428 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7429 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7430 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7431 return fold_build2 (MULT_EXPR, type,
7432 TREE_OPERAND (arg0, 0),
7433 build_real (type, c1));
7434 }
7435 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7436 if (flag_unsafe_math_optimizations
7437 && TREE_CODE (arg1) == PLUS_EXPR
7438 && TREE_CODE (arg0) != MULT_EXPR)
7439 {
7440 tree tree10 = TREE_OPERAND (arg1, 0);
7441 tree tree11 = TREE_OPERAND (arg1, 1);
7442 if (TREE_CODE (tree11) == MULT_EXPR
7443 && TREE_CODE (tree10) == MULT_EXPR)
7444 {
7445 tree tree0;
7446 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7447 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7448 }
7449 }
7450 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7451 if (flag_unsafe_math_optimizations
7452 && TREE_CODE (arg0) == PLUS_EXPR
7453 && TREE_CODE (arg1) != MULT_EXPR)
7454 {
7455 tree tree00 = TREE_OPERAND (arg0, 0);
7456 tree tree01 = TREE_OPERAND (arg0, 1);
7457 if (TREE_CODE (tree01) == MULT_EXPR
7458 && TREE_CODE (tree00) == MULT_EXPR)
7459 {
7460 tree tree0;
7461 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7462 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7463 }
7464 }
7465 }
7466
7467 bit_rotate:
7468 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7469 is a rotate of A by C1 bits. */
7470 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7471 is a rotate of A by B bits. */
7472 {
7473 enum tree_code code0, code1;
7474 code0 = TREE_CODE (arg0);
7475 code1 = TREE_CODE (arg1);
7476 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7477 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7478 && operand_equal_p (TREE_OPERAND (arg0, 0),
7479 TREE_OPERAND (arg1, 0), 0)
7480 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7481 {
7482 tree tree01, tree11;
7483 enum tree_code code01, code11;
7484
7485 tree01 = TREE_OPERAND (arg0, 1);
7486 tree11 = TREE_OPERAND (arg1, 1);
7487 STRIP_NOPS (tree01);
7488 STRIP_NOPS (tree11);
7489 code01 = TREE_CODE (tree01);
7490 code11 = TREE_CODE (tree11);
7491 if (code01 == INTEGER_CST
7492 && code11 == INTEGER_CST
7493 && TREE_INT_CST_HIGH (tree01) == 0
7494 && TREE_INT_CST_HIGH (tree11) == 0
7495 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7496 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7497 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7498 code0 == LSHIFT_EXPR ? tree01 : tree11);
7499 else if (code11 == MINUS_EXPR)
7500 {
7501 tree tree110, tree111;
7502 tree110 = TREE_OPERAND (tree11, 0);
7503 tree111 = TREE_OPERAND (tree11, 1);
7504 STRIP_NOPS (tree110);
7505 STRIP_NOPS (tree111);
7506 if (TREE_CODE (tree110) == INTEGER_CST
7507 && 0 == compare_tree_int (tree110,
7508 TYPE_PRECISION
7509 (TREE_TYPE (TREE_OPERAND
7510 (arg0, 0))))
7511 && operand_equal_p (tree01, tree111, 0))
7512 return build2 ((code0 == LSHIFT_EXPR
7513 ? LROTATE_EXPR
7514 : RROTATE_EXPR),
7515 type, TREE_OPERAND (arg0, 0), tree01);
7516 }
7517 else if (code01 == MINUS_EXPR)
7518 {
7519 tree tree010, tree011;
7520 tree010 = TREE_OPERAND (tree01, 0);
7521 tree011 = TREE_OPERAND (tree01, 1);
7522 STRIP_NOPS (tree010);
7523 STRIP_NOPS (tree011);
7524 if (TREE_CODE (tree010) == INTEGER_CST
7525 && 0 == compare_tree_int (tree010,
7526 TYPE_PRECISION
7527 (TREE_TYPE (TREE_OPERAND
7528 (arg0, 0))))
7529 && operand_equal_p (tree11, tree011, 0))
7530 return build2 ((code0 != LSHIFT_EXPR
7531 ? LROTATE_EXPR
7532 : RROTATE_EXPR),
7533 type, TREE_OPERAND (arg0, 0), tree11);
7534 }
7535 }
7536 }
7537
7538 associate:
7539 /* In most languages, can't associate operations on floats through
7540 parentheses. Rather than remember where the parentheses were, we
7541 don't associate floats at all, unless the user has specified
7542 -funsafe-math-optimizations. */
7543
7544 if (! wins
7545 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7546 {
7547 tree var0, con0, lit0, minus_lit0;
7548 tree var1, con1, lit1, minus_lit1;
7549
7550 /* Split both trees into variables, constants, and literals. Then
7551 associate each group together, the constants with literals,
7552 then the result with variables. This increases the chances of
7553 literals being recombined later and of generating relocatable
7554 expressions for the sum of a constant and literal. */
7555 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7556 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7557 code == MINUS_EXPR);
7558
7559 /* Only do something if we found more than two objects. Otherwise,
7560 nothing has changed and we risk infinite recursion. */
7561 if (2 < ((var0 != 0) + (var1 != 0)
7562 + (con0 != 0) + (con1 != 0)
7563 + (lit0 != 0) + (lit1 != 0)
7564 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7565 {
7566 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7567 if (code == MINUS_EXPR)
7568 code = PLUS_EXPR;
7569
7570 var0 = associate_trees (var0, var1, code, type);
7571 con0 = associate_trees (con0, con1, code, type);
7572 lit0 = associate_trees (lit0, lit1, code, type);
7573 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7574
7575 /* Preserve the MINUS_EXPR if the negative part of the literal is
7576 greater than the positive part. Otherwise, the multiplicative
7577 folding code (i.e extract_muldiv) may be fooled in case
7578 unsigned constants are subtracted, like in the following
7579 example: ((X*2 + 4) - 8U)/2. */
7580 if (minus_lit0 && lit0)
7581 {
7582 if (TREE_CODE (lit0) == INTEGER_CST
7583 && TREE_CODE (minus_lit0) == INTEGER_CST
7584 && tree_int_cst_lt (lit0, minus_lit0))
7585 {
7586 minus_lit0 = associate_trees (minus_lit0, lit0,
7587 MINUS_EXPR, type);
7588 lit0 = 0;
7589 }
7590 else
7591 {
7592 lit0 = associate_trees (lit0, minus_lit0,
7593 MINUS_EXPR, type);
7594 minus_lit0 = 0;
7595 }
7596 }
7597 if (minus_lit0)
7598 {
7599 if (con0 == 0)
7600 return fold_convert (type,
7601 associate_trees (var0, minus_lit0,
7602 MINUS_EXPR, type));
7603 else
7604 {
7605 con0 = associate_trees (con0, minus_lit0,
7606 MINUS_EXPR, type);
7607 return fold_convert (type,
7608 associate_trees (var0, con0,
7609 PLUS_EXPR, type));
7610 }
7611 }
7612
7613 con0 = associate_trees (con0, lit0, code, type);
7614 return fold_convert (type, associate_trees (var0, con0,
7615 code, type));
7616 }
7617 }
7618
7619 binary:
7620 if (wins)
7621 t1 = const_binop (code, arg0, arg1, 0);
7622 if (t1 != NULL_TREE)
7623 {
7624 /* The return value should always have
7625 the same type as the original expression. */
7626 if (TREE_TYPE (t1) != type)
7627 t1 = fold_convert (type, t1);
7628
7629 return t1;
7630 }
7631 return NULL_TREE;
7632
MINUS_EXPR
7633 case MINUS_EXPR:
7634 /* A - (-B) -> A + B */
7635 if (TREE_CODE (arg1) == NEGATE_EXPR)
7636 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7637 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7638 if (TREE_CODE (arg0) == NEGATE_EXPR
7639 && (FLOAT_TYPE_P (type)
7640 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7641 && negate_expr_p (arg1)
7642 && reorder_operands_p (arg0, arg1))
7643 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7644 TREE_OPERAND (arg0, 0));
7645 /* Convert -A - 1 to ~A. */
7646 if (INTEGRAL_TYPE_P (type)
7647 && TREE_CODE (arg0) == NEGATE_EXPR
7648 && integer_onep (arg1))
7649 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7650
7651 /* Convert -1 - A to ~A. */
7652 if (INTEGRAL_TYPE_P (type)
7653 && integer_all_onesp (arg0))
7654 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7655
7656 if (! FLOAT_TYPE_P (type))
7657 {
7658 if (! wins && integer_zerop (arg0))
7659 return negate_expr (fold_convert (type, arg1));
7660 if (integer_zerop (arg1))
7661 return non_lvalue (fold_convert (type, arg0));
7662
7663 /* Fold A - (A & B) into ~B & A. */
7664 if (!TREE_SIDE_EFFECTS (arg0)
7665 && TREE_CODE (arg1) == BIT_AND_EXPR)
7666 {
7667 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7668 return fold_build2 (BIT_AND_EXPR, type,
7669 fold_build1 (BIT_NOT_EXPR, type,
7670 TREE_OPERAND (arg1, 0)),
7671 arg0);
7672 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7673 return fold_build2 (BIT_AND_EXPR, type,
7674 fold_build1 (BIT_NOT_EXPR, type,
7675 TREE_OPERAND (arg1, 1)),
7676 arg0);
7677 }
7678
7679 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7680 any power of 2 minus 1. */
7681 if (TREE_CODE (arg0) == BIT_AND_EXPR
7682 && TREE_CODE (arg1) == BIT_AND_EXPR
7683 && operand_equal_p (TREE_OPERAND (arg0, 0),
7684 TREE_OPERAND (arg1, 0), 0))
7685 {
7686 tree mask0 = TREE_OPERAND (arg0, 1);
7687 tree mask1 = TREE_OPERAND (arg1, 1);
7688 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7689
7690 if (operand_equal_p (tem, mask1, 0))
7691 {
7692 tem = fold_build2 (BIT_XOR_EXPR, type,
7693 TREE_OPERAND (arg0, 0), mask1);
7694 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7695 }
7696 }
7697 }
7698
7699 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7700 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7701 return non_lvalue (fold_convert (type, arg0));
7702
7703 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7704 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7705 (-ARG1 + ARG0) reduces to -ARG1. */
7706 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7707 return negate_expr (fold_convert (type, arg1));
7708
7709 /* Fold &x - &x. This can happen from &x.foo - &x.
7710 This is unsafe for certain floats even in non-IEEE formats.
7711 In IEEE, it is unsafe because it does wrong for NaNs.
7712 Also note that operand_equal_p is always false if an operand
7713 is volatile. */
7714
7715 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7716 && operand_equal_p (arg0, arg1, 0))
7717 return fold_convert (type, integer_zero_node);
7718
7719 /* A - B -> A + (-B) if B is easily negatable. */
7720 if (!wins && negate_expr_p (arg1)
7721 && ((FLOAT_TYPE_P (type)
7722 /* Avoid this transformation if B is a positive REAL_CST. */
7723 && (TREE_CODE (arg1) != REAL_CST
7724 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7725 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7726 return fold_build2 (PLUS_EXPR, type,
7727 fold_convert (type, arg0),
7728 fold_convert (type, negate_expr (arg1)));
7729
7730 /* Try folding difference of addresses. */
7731 {
7732 HOST_WIDE_INT diff;
7733
7734 if ((TREE_CODE (arg0) == ADDR_EXPR
7735 || TREE_CODE (arg1) == ADDR_EXPR)
7736 && ptr_difference_const (arg0, arg1, &diff))
7737 return build_int_cst_type (type, diff);
7738 }
7739
7740 /* Fold &a[i] - &a[j] to i-j. */
7741 if (TREE_CODE (arg0) == ADDR_EXPR
7742 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7743 && TREE_CODE (arg1) == ADDR_EXPR
7744 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7745 {
7746 tree aref0 = TREE_OPERAND (arg0, 0);
7747 tree aref1 = TREE_OPERAND (arg1, 0);
7748 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7749 TREE_OPERAND (aref1, 0), 0))
7750 {
7751 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7752 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7753 tree esz = array_ref_element_size (aref0);
7754 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7755 return fold_build2 (MULT_EXPR, type, diff,
7756 fold_convert (type, esz));
7757
7758 }
7759 }
7760
7761 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7762 of the array. Loop optimizer sometimes produce this type of
7763 expressions. */
7764 if (TREE_CODE (arg0) == ADDR_EXPR)
7765 {
7766 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7767 if (tem)
7768 return fold_convert (type, tem);
7769 }
7770
7771 if (flag_unsafe_math_optimizations
7772 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7773 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7774 && (tem = distribute_real_division (code, type, arg0, arg1)))
7775 return tem;
7776
7777 if (TREE_CODE (arg0) == MULT_EXPR
7778 && TREE_CODE (arg1) == MULT_EXPR
7779 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7780 {
7781 /* (A * C) - (B * C) -> (A-B) * C. */
7782 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7783 TREE_OPERAND (arg1, 1), 0))
7784 return fold_build2 (MULT_EXPR, type,
7785 fold_build2 (MINUS_EXPR, type,
7786 TREE_OPERAND (arg0, 0),
7787 TREE_OPERAND (arg1, 0)),
7788 TREE_OPERAND (arg0, 1));
7789 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7790 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7791 TREE_OPERAND (arg1, 0), 0))
7792 return fold_build2 (MULT_EXPR, type,
7793 TREE_OPERAND (arg0, 0),
7794 fold_build2 (MINUS_EXPR, type,
7795 TREE_OPERAND (arg0, 1),
7796 TREE_OPERAND (arg1, 1)));
7797 }
7798
7799 goto associate;
7800
MULT_EXPR
7801 case MULT_EXPR:
7802 /* (-A) * (-B) -> A * B */
7803 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7804 return fold_build2 (MULT_EXPR, type,
7805 TREE_OPERAND (arg0, 0),
7806 negate_expr (arg1));
7807 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7808 return fold_build2 (MULT_EXPR, type,
7809 negate_expr (arg0),
7810 TREE_OPERAND (arg1, 0));
7811
INTEGER
7812 if (! FLOAT_TYPE_P (type))
7813 {
7814 if (integer_zerop (arg1))
7815 return omit_one_operand (type, arg1, arg0);
7816 if (integer_onep (arg1))
7817 return non_lvalue (fold_convert (type, arg0));
7818 /* Transform x * -1 into -x. */
7819 if (integer_all_onesp (arg1))
7820 return fold_convert (type, negate_expr (arg0));
7821
7822 /* (a * (1 << b)) is (a << b) */
7823 if (TREE_CODE (arg1) == LSHIFT_EXPR
7824 && integer_onep (TREE_OPERAND (arg1, 0)))
7825 return fold_build2 (LSHIFT_EXPR, type, arg0,
7826 TREE_OPERAND (arg1, 1));
7827 if (TREE_CODE (arg0) == LSHIFT_EXPR
7828 && integer_onep (TREE_OPERAND (arg0, 0)))
7829 return fold_build2 (LSHIFT_EXPR, type, arg1,
7830 TREE_OPERAND (arg0, 1));
7831
7832 if (TREE_CODE (arg1) == INTEGER_CST
7833 && 0 != (tem = extract_muldiv (op0,
7834 fold_convert (type, arg1),
7835 code, NULL_TREE)))
7836 return fold_convert (type, tem);
7837
7838 }
REAL
7839 else
7840 {
7841 /* Maybe fold x * 0 to 0. The expressions aren't the same
7842 when x is NaN, since x * 0 is also NaN. Nor are they the
7843 same in modes with signed zeros, since multiplying a
7844 negative value by 0 gives -0, not +0. */
7845 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7846 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7847 && real_zerop (arg1))
7848 return omit_one_operand (type, arg1, arg0);
7849 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7850 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7851 && real_onep (arg1))
7852 return non_lvalue (fold_convert (type, arg0));
7853
7854 /* Transform x * -1.0 into -x. */
7855 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7856 && real_minus_onep (arg1))
7857 return fold_convert (type, negate_expr (arg0));
7858
7859 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7860 if (flag_unsafe_math_optimizations
7861 && TREE_CODE (arg0) == RDIV_EXPR
7862 && TREE_CODE (arg1) == REAL_CST
7863 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7864 {
7865 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7866 arg1, 0);
7867 if (tem)
7868 return fold_build2 (RDIV_EXPR, type, tem,
7869 TREE_OPERAND (arg0, 1));
7870 }
7871
7872 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7873 if (operand_equal_p (arg0, arg1, 0))
7874 {
7875 tree tem = fold_strip_sign_ops (arg0);
7876 if (tem != NULL_TREE)
7877 {
7878 tem = fold_convert (type, tem);
7879 return fold_build2 (MULT_EXPR, type, tem, tem);
7880 }
7881 }
7882
7883 if (flag_unsafe_math_optimizations)
7884 {
7885 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7886 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7887
7888 /* Optimizations of root(...)*root(...). */
7889 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7890 {
7891 tree rootfn, arg, arglist;
7892 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7893 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7894
7895 /* Optimize sqrt(x)*sqrt(x) as x. */
7896 if (BUILTIN_SQRT_P (fcode0)
7897 && operand_equal_p (arg00, arg10, 0)
7898 && ! HONOR_SNANS (TYPE_MODE (type)))
7899 return arg00;
7900
7901 /* Optimize root(x)*root(y) as root(x*y). */
7902 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7903 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7904 arglist = build_tree_list (NULL_TREE, arg);
7905 return build_function_call_expr (rootfn, arglist);
7906 }
7907
7908 /* Optimize expN(x)*expN(y) as expN(x+y). */
7909 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7910 {
7911 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7912 tree arg = fold_build2 (PLUS_EXPR, type,
7913 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7914 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7915 tree arglist = build_tree_list (NULL_TREE, arg);
7916 return build_function_call_expr (expfn, arglist);
7917 }
7918
7919 /* Optimizations of pow(...)*pow(...). */
7920 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7921 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7922 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7923 {
7924 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7925 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7926 1)));
7927 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7928 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7929 1)));
7930
7931 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7932 if (operand_equal_p (arg01, arg11, 0))
7933 {
7934 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7935 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7936 tree arglist = tree_cons (NULL_TREE, arg,
7937 build_tree_list (NULL_TREE,
7938 arg01));
7939 return build_function_call_expr (powfn, arglist);
7940 }
7941
7942 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7943 if (operand_equal_p (arg00, arg10, 0))
7944 {
7945 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7946 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7947 tree arglist = tree_cons (NULL_TREE, arg00,
7948 build_tree_list (NULL_TREE,
7949 arg));
7950 return build_function_call_expr (powfn, arglist);
7951 }
7952 }
7953
7954 /* Optimize tan(x)*cos(x) as sin(x). */
7955 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7956 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7957 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7958 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7959 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7960 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7961 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7962 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7963 {
7964 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7965
7966 if (sinfn != NULL_TREE)
7967 return build_function_call_expr (sinfn,
7968 TREE_OPERAND (arg0, 1));
7969 }
7970
7971 /* Optimize x*pow(x,c) as pow(x,c+1). */
7972 if (fcode1 == BUILT_IN_POW
7973 || fcode1 == BUILT_IN_POWF
7974 || fcode1 == BUILT_IN_POWL)
7975 {
7976 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7977 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7978 1)));
7979 if (TREE_CODE (arg11) == REAL_CST
7980 && ! TREE_CONSTANT_OVERFLOW (arg11)
7981 && operand_equal_p (arg0, arg10, 0))
7982 {
7983 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7984 REAL_VALUE_TYPE c;
7985 tree arg, arglist;
7986
7987 c = TREE_REAL_CST (arg11);
7988 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7989 arg = build_real (type, c);
7990 arglist = build_tree_list (NULL_TREE, arg);
7991 arglist = tree_cons (NULL_TREE, arg0, arglist);
7992 return build_function_call_expr (powfn, arglist);
7993 }
7994 }
7995
7996 /* Optimize pow(x,c)*x as pow(x,c+1). */
7997 if (fcode0 == BUILT_IN_POW
7998 || fcode0 == BUILT_IN_POWF
7999 || fcode0 == BUILT_IN_POWL)
8000 {
8001 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8002 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8003 1)));
8004 if (TREE_CODE (arg01) == REAL_CST
8005 && ! TREE_CONSTANT_OVERFLOW (arg01)
8006 && operand_equal_p (arg1, arg00, 0))
8007 {
8008 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8009 REAL_VALUE_TYPE c;
8010 tree arg, arglist;
8011
8012 c = TREE_REAL_CST (arg01);
8013 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8014 arg = build_real (type, c);
8015 arglist = build_tree_list (NULL_TREE, arg);
8016 arglist = tree_cons (NULL_TREE, arg1, arglist);
8017 return build_function_call_expr (powfn, arglist);
8018 }
8019 }
8020
8021 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8022 if (! optimize_size
8023 && operand_equal_p (arg0, arg1, 0))
8024 {
8025 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8026
8027 if (powfn)
8028 {
8029 tree arg = build_real (type, dconst2);
8030 tree arglist = build_tree_list (NULL_TREE, arg);
8031 arglist = tree_cons (NULL_TREE, arg0, arglist);
8032 return build_function_call_expr (powfn, arglist);
8033 }
8034 }
8035 }
8036 }
8037 goto associate;
8038
BIT_IOR_EXPR[]
8039 case BIT_IOR_EXPR:
8040 bit_ior:
8041 if (integer_all_onesp (arg1))
8042 return omit_one_operand (type, arg1, arg0);
8043 if (integer_zerop (arg1))
8044 return non_lvalue (fold_convert (type, arg0));
8045 if (operand_equal_p (arg0, arg1, 0))
8046 return non_lvalue (fold_convert (type, arg0));
8047
8048 /* ~X | X is -1. */
8049 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8050 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8051 {
8052 t1 = build_int_cst (type, -1);
8053 t1 = force_fit_type (t1, 0, false, false);
8054 return omit_one_operand (type, t1, arg1);
8055 }
8056
8057 /* X | ~X is -1. */
8058 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8060 {
8061 t1 = build_int_cst (type, -1);
8062 t1 = force_fit_type (t1, 0, false, false);
8063 return omit_one_operand (type, t1, arg0);
8064 }
8065
8066 t1 = distribute_bit_expr (code, type, arg0, arg1);
8067 if (t1 != NULL_TREE)
8068 return t1;
8069
8070 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8071
8072 This results in more efficient code for machines without a NAND
8073 instruction. Combine will canonicalize to the first form
8074 which will allow use of NAND instructions provided by the
8075 backend if they exist. */
8076 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8077 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8078 {
8079 return fold_build1 (BIT_NOT_EXPR, type,
8080 build2 (BIT_AND_EXPR, type,
8081 TREE_OPERAND (arg0, 0),
8082 TREE_OPERAND (arg1, 0)));
8083 }
8084
8085 /* See if this can be simplified into a rotate first. If that
8086 is unsuccessful continue in the association code. */
8087 goto bit_rotate;
8088
BIT_XOR_EXPR[]
8089 case BIT_XOR_EXPR:
8090 if (integer_zerop (arg1))
8091 return non_lvalue (fold_convert (type, arg0));
8092 if (integer_all_onesp (arg1))
8093 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8094 if (operand_equal_p (arg0, arg1, 0))
8095 return omit_one_operand (type, integer_zero_node, arg0);
8096
8097 /* ~X ^ X is -1. */
8098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8100 {
8101 t1 = build_int_cst (type, -1);
8102 t1 = force_fit_type (t1, 0, false, false);
8103 return omit_one_operand (type, t1, arg1);
8104 }
8105
8106 /* X ^ ~X is -1. */
8107 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8108 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8109 {
8110 t1 = build_int_cst (type, -1);
8111 t1 = force_fit_type (t1, 0, false, false);
8112 return omit_one_operand (type, t1, arg0);
8113 }
8114
8115 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8116 with a constant, and the two constants have no bits in common,
8117 we should treat this as a BIT_IOR_EXPR since this may produce more
8118 simplifications. */
8119 if (TREE_CODE (arg0) == BIT_AND_EXPR
8120 && TREE_CODE (arg1) == BIT_AND_EXPR
8121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8122 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8123 && integer_zerop (const_binop (BIT_AND_EXPR,
8124 TREE_OPERAND (arg0, 1),
8125 TREE_OPERAND (arg1, 1), 0)))
8126 {
8127 code = BIT_IOR_EXPR;
8128 goto bit_ior;
8129 }
8130
8131 /* (X | Y) ^ X -> Y & ~ X*/
8132 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8134 {
8135 tree t2 = TREE_OPERAND (arg0, 1);
8136 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8137 arg1);
8138 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8139 fold_convert (type, t1));
8140 return t1;
8141 }
8142
8143 /* (Y | X) ^ X -> Y & ~ X*/
8144 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8145 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8146 {
8147 tree t2 = TREE_OPERAND (arg0, 0);
8148 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8149 arg1);
8150 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8151 fold_convert (type, t1));
8152 return t1;
8153 }
8154
8155 /* X ^ (X | Y) -> Y & ~ X*/
8156 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8157 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8158 {
8159 tree t2 = TREE_OPERAND (arg1, 1);
8160 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8161 arg0);
8162 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8163 fold_convert (type, t1));
8164 return t1;
8165 }
8166
8167 /* X ^ (Y | X) -> Y & ~ X*/
8168 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8169 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8170 {
8171 tree t2 = TREE_OPERAND (arg1, 0);
8172 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8173 arg0);
8174 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8175 fold_convert (type, t1));
8176 return t1;
8177 }
8178
8179 /* Convert ~X ^ ~Y to X ^ Y. */
8180 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8181 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8182 return fold_build2 (code, type,
8183 fold_convert (type, TREE_OPERAND (arg0, 0)),
8184 fold_convert (type, TREE_OPERAND (arg1, 0)));
8185
8186 /* See if this can be simplified into a rotate first. If that
8187 is unsuccessful continue in the association code. */
8188 goto bit_rotate;
8189
BIT_AND_EXPR[]
8190 case BIT_AND_EXPR:
8191 if (integer_all_onesp (arg1))
8192 return non_lvalue (fold_convert (type, arg0));
8193 if (integer_zerop (arg1))
8194 return omit_one_operand (type, arg1, arg0);
8195 if (operand_equal_p (arg0, arg1, 0))
8196 return non_lvalue (fold_convert (type, arg0));
8197
8198 /* ~X & X is always zero. */
8199 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8201 return omit_one_operand (type, integer_zero_node, arg1);
8202
8203 /* X & ~X is always zero. */
8204 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8206 return omit_one_operand (type, integer_zero_node, arg0);
8207
8208 t1 = distribute_bit_expr (code, type, arg0, arg1);
8209 if (t1 != NULL_TREE)
8210 return t1;
8211 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8212 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8213 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8214 {
8215 unsigned int prec
8216 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8217
8218 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8219 && (~TREE_INT_CST_LOW (arg1)
8220 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8221 return fold_convert (type, TREE_OPERAND (arg0, 0));
8222 }
8223
8224 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8225
8226 This results in more efficient code for machines without a NOR
8227 instruction. Combine will canonicalize to the first form
8228 which will allow use of NOR instructions provided by the
8229 backend if they exist. */
8230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8231 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8232 {
8233 return fold_build1 (BIT_NOT_EXPR, type,
8234 build2 (BIT_IOR_EXPR, type,
8235 TREE_OPERAND (arg0, 0),
8236 TREE_OPERAND (arg1, 0)));
8237 }
8238
8239 goto associate;
8240
RDIV_EXPR[]
8241 case RDIV_EXPR:
8242 /* Don't touch a floating-point divide by zero unless the mode
8243 of the constant can represent infinity. */
8244 if (TREE_CODE (arg1) == REAL_CST
8245 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8246 && real_zerop (arg1))
8247 return NULL_TREE;
8248
8249 /* (-A) / (-B) -> A / B */
8250 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8251 return fold_build2 (RDIV_EXPR, type,
8252 TREE_OPERAND (arg0, 0),
8253 negate_expr (arg1));
8254 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8255 return fold_build2 (RDIV_EXPR, type,
8256 negate_expr (arg0),
8257 TREE_OPERAND (arg1, 0));
8258
8259 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8260 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8261 && real_onep (arg1))
8262 return non_lvalue (fold_convert (type, arg0));
8263
8264 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8265 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8266 && real_minus_onep (arg1))
8267 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8268
8269 /* If ARG1 is a constant, we can convert this to a multiply by the
8270 reciprocal. This does not have the same rounding properties,
8271 so only do this if -funsafe-math-optimizations. We can actually
8272 always safely do it if ARG1 is a power of two, but it's hard to
8273 tell if it is or not in a portable manner. */
8274 if (TREE_CODE (arg1) == REAL_CST)
8275 {
8276 if (flag_unsafe_math_optimizations
8277 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8278 arg1, 0)))
8279 return fold_build2 (MULT_EXPR, type, arg0, tem);
8280 /* Find the reciprocal if optimizing and the result is exact. */
8281 if (optimize)
8282 {
8283 REAL_VALUE_TYPE r;
8284 r = TREE_REAL_CST (arg1);
8285 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8286 {
8287 tem = build_real (type, r);
8288 return fold_build2 (MULT_EXPR, type,
8289 fold_convert (type, arg0), tem);
8290 }
8291 }
8292 }
8293 /* Convert A/B/C to A/(B*C). */
8294 if (flag_unsafe_math_optimizations
8295 && TREE_CODE (arg0) == RDIV_EXPR)
8296 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8297 fold_build2 (MULT_EXPR, type,
8298 TREE_OPERAND (arg0, 1), arg1));
8299
8300 /* Convert A/(B/C) to (A/B)*C. */
8301 if (flag_unsafe_math_optimizations
8302 && TREE_CODE (arg1) == RDIV_EXPR)
8303 return fold_build2 (MULT_EXPR, type,
8304 fold_build2 (RDIV_EXPR, type, arg0,
8305 TREE_OPERAND (arg1, 0)),
8306 TREE_OPERAND (arg1, 1));
8307
8308 /* Convert C1/(X*C2) into (C1/C2)/X. */
8309 if (flag_unsafe_math_optimizations
8310 && TREE_CODE (arg1) == MULT_EXPR
8311 && TREE_CODE (arg0) == REAL_CST
8312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8313 {
8314 tree tem = const_binop (RDIV_EXPR, arg0,
8315 TREE_OPERAND (arg1, 1), 0);
8316 if (tem)
8317 return fold_build2 (RDIV_EXPR, type, tem,
8318 TREE_OPERAND (arg1, 0));
8319 }
8320
8321 if (flag_unsafe_math_optimizations)
8322 {
8323 enum built_in_function fcode = builtin_mathfn_code (arg1);
8324 /* Optimize x/expN(y) into x*expN(-y). */
8325 if (BUILTIN_EXPONENT_P (fcode))
8326 {
8327 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8328 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8329 tree arglist = build_tree_list (NULL_TREE,
8330 fold_convert (type, arg));
8331 arg1 = build_function_call_expr (expfn, arglist);
8332 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8333 }
8334
8335 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8336 if (fcode == BUILT_IN_POW
8337 || fcode == BUILT_IN_POWF
8338 || fcode == BUILT_IN_POWL)
8339 {
8340 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8341 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8342 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8343 tree neg11 = fold_convert (type, negate_expr (arg11));
8344 tree arglist = tree_cons(NULL_TREE, arg10,
8345 build_tree_list (NULL_TREE, neg11));
8346 arg1 = build_function_call_expr (powfn, arglist);
8347 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8348 }
8349 }
8350
8351 if (flag_unsafe_math_optimizations)
8352 {
8353 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8354 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8355
8356 /* Optimize sin(x)/cos(x) as tan(x). */
8357 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8358 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8359 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8360 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8361 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8362 {
8363 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8364
8365 if (tanfn != NULL_TREE)
8366 return build_function_call_expr (tanfn,
8367 TREE_OPERAND (arg0, 1));
8368 }
8369
8370 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8371 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8372 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8373 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8374 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8375 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8376 {
8377 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8378
8379 if (tanfn != NULL_TREE)
8380 {
8381 tree tmp = TREE_OPERAND (arg0, 1);
8382 tmp = build_function_call_expr (tanfn, tmp);
8383 return fold_build2 (RDIV_EXPR, type,
8384 build_real (type, dconst1), tmp);
8385 }
8386 }
8387
8388 /* Optimize pow(x,c)/x as pow(x,c-1). */
8389 if (fcode0 == BUILT_IN_POW
8390 || fcode0 == BUILT_IN_POWF
8391 || fcode0 == BUILT_IN_POWL)
8392 {
8393 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8394 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8395 if (TREE_CODE (arg01) == REAL_CST
8396 && ! TREE_CONSTANT_OVERFLOW (arg01)
8397 && operand_equal_p (arg1, arg00, 0))
8398 {
8399 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8400 REAL_VALUE_TYPE c;
8401 tree arg, arglist;
8402
8403 c = TREE_REAL_CST (arg01);
8404 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8405 arg = build_real (type, c);
8406 arglist = build_tree_list (NULL_TREE, arg);
8407 arglist = tree_cons (NULL_TREE, arg1, arglist);
8408 return build_function_call_expr (powfn, arglist);
8409 }
8410 }
8411 }
8412 goto binary;
8413
*_DIV_EXPR[]
8414 case TRUNC_DIV_EXPR:
8415 case ROUND_DIV_EXPR:
8416 case FLOOR_DIV_EXPR:
8417 case CEIL_DIV_EXPR:
8418 case EXACT_DIV_EXPR:
8419 if (integer_onep (arg1))
8420 return non_lvalue (fold_convert (type, arg0));
8421 if (integer_zerop (arg1))
8422 return NULL_TREE;
8423 /* X / -1 is -X. */
8424 if (!TYPE_UNSIGNED (type)
8425 && TREE_CODE (arg1) == INTEGER_CST
8426 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8427 && TREE_INT_CST_HIGH (arg1) == -1)
8428 return fold_convert (type, negate_expr (arg0));
8429
8430 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8431 operation, EXACT_DIV_EXPR.
8432
8433 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8434 At one time others generated faster code, it's not clear if they do
8435 after the last round to changes to the DIV code in expmed.c. */
8436 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8437 && multiple_of_p (type, arg0, arg1))
8438 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8439
8440 if (TREE_CODE (arg1) == INTEGER_CST
8441 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8442 return fold_convert (type, tem);
8443
8444 goto binary;
8445
*_MOD_EXPR[]
8446 case CEIL_MOD_EXPR:
8447 case FLOOR_MOD_EXPR:
8448 case ROUND_MOD_EXPR:
8449 case TRUNC_MOD_EXPR:
8450 /* X % 1 is always zero, but be sure to preserve any side
8451 effects in X. */
8452 if (integer_onep (arg1))
8453 return omit_one_operand (type, integer_zero_node, arg0);
8454
8455 /* X % 0, return X % 0 unchanged so that we can get the
8456 proper warnings and errors. */
8457 if (integer_zerop (arg1))
8458 return NULL_TREE;
8459
8460 /* 0 % X is always zero, but be sure to preserve any side
8461 effects in X. Place this after checking for X == 0. */
8462 if (integer_zerop (arg0))
8463 return omit_one_operand (type, integer_zero_node, arg1);
8464
8465 /* X % -1 is zero. */
8466 if (!TYPE_UNSIGNED (type)
8467 && TREE_CODE (arg1) == INTEGER_CST
8468 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8469 && TREE_INT_CST_HIGH (arg1) == -1)
8470 return omit_one_operand (type, integer_zero_node, arg0);
8471
8472 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8473 i.e. "X % C" into "X & C2", if X and C are positive. */
8474 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8475 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8476 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8477 {
8478 unsigned HOST_WIDE_INT high, low;
8479 tree mask;
8480 int l;
8481
8482 l = tree_log2 (arg1);
8483 if (l >= HOST_BITS_PER_WIDE_INT)
8484 {
8485 high = ((unsigned HOST_WIDE_INT) 1
8486 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8487 low = -1;
8488 }
8489 else
8490 {
8491 high = 0;
8492 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8493 }
8494
8495 mask = build_int_cst_wide (type, low, high);
8496 return fold_build2 (BIT_AND_EXPR, type,
8497 fold_convert (type, arg0), mask);
8498 }
8499
8500 /* X % -C is the same as X % C. */
8501 if (code == TRUNC_MOD_EXPR
8502 && !TYPE_UNSIGNED (type)
8503 && TREE_CODE (arg1) == INTEGER_CST
8504 && !TREE_CONSTANT_OVERFLOW (arg1)
8505 && TREE_INT_CST_HIGH (arg1) < 0
8506 && !flag_trapv
8507 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8508 && !sign_bit_p (arg1, arg1))
8509 return fold_build2 (code, type, fold_convert (type, arg0),
8510 fold_convert (type, negate_expr (arg1)));
8511
8512 /* X % -Y is the same as X % Y. */
8513 if (code == TRUNC_MOD_EXPR
8514 && !TYPE_UNSIGNED (type)
8515 && TREE_CODE (arg1) == NEGATE_EXPR
8516 && !flag_trapv)
8517 return fold_build2 (code, type, fold_convert (type, arg0),
8518 fold_convert (type, TREE_OPERAND (arg1, 0)));
8519
8520 if (TREE_CODE (arg1) == INTEGER_CST
8521 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8522 return fold_convert (type, tem);
8523
8524 goto binary;
8525
ROTATE_EXPR[]
8526 case LROTATE_EXPR:
8527 case RROTATE_EXPR:
8528 if (integer_all_onesp (arg0))
8529 return omit_one_operand (type, arg0, arg1);
8530 goto shift;
8531
SHIFT_EXPR[]
8532 case RSHIFT_EXPR:
8533 /* Optimize -1 >> x for arithmetic right shifts. */
8534 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8535 return omit_one_operand (type, arg0, arg1);
8536 /* ... fall through ... */
8537
8538 case LSHIFT_EXPR:
8539 shift:
8540 if (integer_zerop (arg1))
8541 return non_lvalue (fold_convert (type, arg0));
8542 if (integer_zerop (arg0))
8543 return omit_one_operand (type, arg0, arg1);
8544
8545 /* Since negative shift count is not well-defined,
8546 don't try to compute it in the compiler. */
8547 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8548 return NULL_TREE;
8549
8550 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8551 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8552 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8553 && host_integerp (TREE_OPERAND (arg0, 1), false)
8554 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8555 {
8556 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8557 + TREE_INT_CST_LOW (arg1));
8558
8559 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8560 being well defined. */
8561 if (low >= TYPE_PRECISION (type))
8562 {
8563 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8564 low = low % TYPE_PRECISION (type);
8565 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8566 return build_int_cst (type, 0);
8567 else
8568 low = TYPE_PRECISION (type) - 1;
8569 }
8570
8571 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8572 build_int_cst (type, low));
8573 }
8574
8575 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8576 into x & ((unsigned)-1 >> c) for unsigned types. */
8577 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8578 || (TYPE_UNSIGNED (type)
8579 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8580 && host_integerp (arg1, false)
8581 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8582 && host_integerp (TREE_OPERAND (arg0, 1), false)
8583 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8584 {
8585 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8586 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8587 tree lshift;
8588 tree arg00;
8589
8590 if (low0 == low1)
8591 {
8592 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8593
8594 lshift = build_int_cst (type, -1);
8595 lshift = int_const_binop (code, lshift, arg1, 0);
8596
8597 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8598 }
8599 }
8600
8601 /* Rewrite an LROTATE_EXPR by a constant into an
8602 RROTATE_EXPR by a new constant. */
8603 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8604 {
8605 tree tem = build_int_cst (NULL_TREE,
8606 GET_MODE_BITSIZE (TYPE_MODE (type)));
8607 tem = fold_convert (TREE_TYPE (arg1), tem);
8608 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8609 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8610 }
8611
8612 /* If we have a rotate of a bit operation with the rotate count and
8613 the second operand of the bit operation both constant,
8614 permute the two operations. */
8615 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8616 && (TREE_CODE (arg0) == BIT_AND_EXPR
8617 || TREE_CODE (arg0) == BIT_IOR_EXPR
8618 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8620 return fold_build2 (TREE_CODE (arg0), type,
8621 fold_build2 (code, type,
8622 TREE_OPERAND (arg0, 0), arg1),
8623 fold_build2 (code, type,
8624 TREE_OPERAND (arg0, 1), arg1));
8625
8626 /* Two consecutive rotates adding up to the width of the mode can
8627 be ignored. */
8628 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8629 && TREE_CODE (arg0) == RROTATE_EXPR
 8630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8631 && TREE_INT_CST_HIGH (arg1) == 0
8632 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8633 && ((TREE_INT_CST_LOW (arg1)
8634 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8635 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8636 return TREE_OPERAND (arg0, 0);
8637
8638 goto binary;
8639
MIN_EXPR[]
8640 case MIN_EXPR:
8641 if (operand_equal_p (arg0, arg1, 0))
8642 return omit_one_operand (type, arg0, arg1);
8643 if (INTEGRAL_TYPE_P (type)
8644 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8645 return omit_one_operand (type, arg1, arg0);
8646 goto associate;
8647
MAX_EXPR[]
8648 case MAX_EXPR:
8649 if (operand_equal_p (arg0, arg1, 0))
8650 return omit_one_operand (type, arg0, arg1);
8651 if (INTEGRAL_TYPE_P (type)
8652 && TYPE_MAX_VALUE (type)
8653 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8654 return omit_one_operand (type, arg1, arg0);
8655 goto associate;
8656
TRUTH_ANDIF_EXPR[]
8657 case TRUTH_ANDIF_EXPR:
8658 /* Note that the operands of this must be ints
8659 and their values must be 0 or 1.
8660 ("true" is a fixed value perhaps depending on the language.) */
8661 /* If first arg is constant zero, return it. */
8662 if (integer_zerop (arg0))
8663 return fold_convert (type, arg0);
TRUTH_AND_EXPR[]
8664 case TRUTH_AND_EXPR:
8665 /* If either arg is constant true, drop it. */
8666 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8667 return non_lvalue (fold_convert (type, arg1));
8668 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8669 /* Preserve sequence points. */
8670 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8671 return non_lvalue (fold_convert (type, arg0));
8672 /* If second arg is constant zero, result is zero, but first arg
8673 must be evaluated. */
8674 if (integer_zerop (arg1))
8675 return omit_one_operand (type, arg1, arg0);
8676 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8677 case will be handled here. */
8678 if (integer_zerop (arg0))
8679 return omit_one_operand (type, arg0, arg1);
8680
8681 /* !X && X is always false. */
8682 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8683 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8684 return omit_one_operand (type, integer_zero_node, arg1);
8685 /* X && !X is always false. */
8686 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8687 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8688 return omit_one_operand (type, integer_zero_node, arg0);
8689
8690 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8691 means A >= Y && A != MAX, but in this case we know that
8692 A < X <= MAX. */
8693
8694 if (!TREE_SIDE_EFFECTS (arg0)
8695 && !TREE_SIDE_EFFECTS (arg1))
8696 {
8697 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8698 if (tem && !operand_equal_p (tem, arg0, 0))
8699 return fold_build2 (code, type, tem, arg1);
8700
8701 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8702 if (tem && !operand_equal_p (tem, arg1, 0))
8703 return fold_build2 (code, type, arg0, tem);
8704 }
8705
8706 truth_andor:
8707 /* We only do these simplifications if we are optimizing. */
8708 if (!optimize)
8709 return NULL_TREE;
8710
8711 /* Check for things like (A || B) && (A || C). We can convert this
8712 to A || (B && C). Note that either operator can be any of the four
8713 truth and/or operations and the transformation will still be
8714 valid. Also note that we only care about order for the
8715 ANDIF and ORIF operators. If B contains side effects, this
8716 might change the truth-value of A. */
8717 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8718 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8719 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8720 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8721 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8722 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8723 {
8724 tree a00 = TREE_OPERAND (arg0, 0);
8725 tree a01 = TREE_OPERAND (arg0, 1);
8726 tree a10 = TREE_OPERAND (arg1, 0);
8727 tree a11 = TREE_OPERAND (arg1, 1);
8728 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8729 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8730 && (code == TRUTH_AND_EXPR
8731 || code == TRUTH_OR_EXPR));
8732
8733 if (operand_equal_p (a00, a10, 0))
8734 return fold_build2 (TREE_CODE (arg0), type, a00,
8735 fold_build2 (code, type, a01, a11));
8736 else if (commutative && operand_equal_p (a00, a11, 0))
8737 return fold_build2 (TREE_CODE (arg0), type, a00,
8738 fold_build2 (code, type, a01, a10));
8739 else if (commutative && operand_equal_p (a01, a10, 0))
8740 return fold_build2 (TREE_CODE (arg0), type, a01,
8741 fold_build2 (code, type, a00, a11));
8742
8743 /* This case if tricky because we must either have commutative
8744 operators or else A10 must not have side-effects. */
8745
8746 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8747 && operand_equal_p (a01, a11, 0))
8748 return fold_build2 (TREE_CODE (arg0), type,
8749 fold_build2 (code, type, a00, a10),
8750 a01);
8751 }
8752
8753 /* See if we can build a range comparison. */
8754 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8755 return tem;
8756
8757 /* Check for the possibility of merging component references. If our
8758 lhs is another similar operation, try to merge its rhs with our
8759 rhs. Then try to merge our lhs and rhs. */
8760 if (TREE_CODE (arg0) == code
8761 && 0 != (tem = fold_truthop (code, type,
8762 TREE_OPERAND (arg0, 1), arg1)))
8763 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8764
8765 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8766 return tem;
8767
8768 return NULL_TREE;
8769
TRUTH_ORIF_EXPR[]
8770 case TRUTH_ORIF_EXPR:
8771 /* Note that the operands of this must be ints
8772 and their values must be 0 or true.
8773 ("true" is a fixed value perhaps depending on the language.) */
8774 /* If first arg is constant true, return it. */
8775 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8776 return fold_convert (type, arg0);
TRUTH_OR_EXPR[]
8777 case TRUTH_OR_EXPR:
8778 /* If either arg is constant zero, drop it. */
8779 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8780 return non_lvalue (fold_convert (type, arg1));
8781 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8782 /* Preserve sequence points. */
8783 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8784 return non_lvalue (fold_convert (type, arg0));
8785 /* If second arg is constant true, result is true, but we must
8786 evaluate first arg. */
8787 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8788 return omit_one_operand (type, arg1, arg0);
8789 /* Likewise for first arg, but note this only occurs here for
8790 TRUTH_OR_EXPR. */
8791 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8792 return omit_one_operand (type, arg0, arg1);
8793
8794 /* !X || X is always true. */
8795 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8796 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8797 return omit_one_operand (type, integer_one_node, arg1);
8798 /* X || !X is always true. */
8799 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8800 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8801 return omit_one_operand (type, integer_one_node, arg0);
8802
8803 goto truth_andor;
8804
TRUTH_XOR_EXPR[]
8805 case TRUTH_XOR_EXPR:
8806 /* If the second arg is constant zero, drop it. */
8807 if (integer_zerop (arg1))
8808 return non_lvalue (fold_convert (type, arg0));
8809 /* If the second arg is constant true, this is a logical inversion. */
8810 if (integer_onep (arg1))
8811 {
8812 /* Only call invert_truthvalue if operand is a truth value. */
8813 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8814 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8815 else
8816 tem = invert_truthvalue (arg0);
8817 return non_lvalue (fold_convert (type, tem));
8818 }
8819 /* Identical arguments cancel to zero. */
8820 if (operand_equal_p (arg0, arg1, 0))
8821 return omit_one_operand (type, integer_zero_node, arg0);
8822
8823 /* !X ^ X is always true. */
8824 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8826 return omit_one_operand (type, integer_one_node, arg1);
8827
8828 /* X ^ !X is always true. */
8829 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8830 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8831 return omit_one_operand (type, integer_one_node, arg0);
8832
8833 return NULL_TREE;
8834
EQ_EXPR, NE_EXPR, LT_EXPR, GT_EXPR, GE_EXPR[]
8835 case EQ_EXPR:
8836 case NE_EXPR:
8837 case LT_EXPR:
8838 case GT_EXPR:
8839 case LE_EXPR:
8840 case GE_EXPR:
8841 /* If one arg is a real or integer constant, put it last. */
8842 if (tree_swap_operands_p (arg0, arg1, true))
8843 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8844
8845 /* bool_var != 0 becomes bool_var. */
8846 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8847 && code == NE_EXPR)
8848 return non_lvalue (fold_convert (type, arg0));
8849
8850 /* bool_var == 1 becomes bool_var. */
8851 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8852 && code == EQ_EXPR)
8853 return non_lvalue (fold_convert (type, arg0));
8854
8855 /* If this is an equality comparison of the address of a non-weak
8856 object against zero, then we know the result. */
8857 if ((code == EQ_EXPR || code == NE_EXPR)
8858 && TREE_CODE (arg0) == ADDR_EXPR
8859 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8860 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8861 && integer_zerop (arg1))
8862 return constant_boolean_node (code != EQ_EXPR, type);
8863
8864 /* If this is an equality comparison of the address of two non-weak,
8865 unaliased symbols neither of which are extern (since we do not
8866 have access to attributes for externs), then we know the result. */
8867 if ((code == EQ_EXPR || code == NE_EXPR)
8868 && TREE_CODE (arg0) == ADDR_EXPR
8869 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8870 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8871 && ! lookup_attribute ("alias",
8872 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8873 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8874 && TREE_CODE (arg1) == ADDR_EXPR
8875 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8876 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8877 && ! lookup_attribute ("alias",
8878 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8879 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8880 {
8881 /* We know that we're looking at the address of two
8882 non-weak, unaliased, static _DECL nodes.
8883
8884 It is both wasteful and incorrect to call operand_equal_p
8885 to compare the two ADDR_EXPR nodes. It is wasteful in that
8886 all we need to do is test pointer equality for the arguments
8887 to the two ADDR_EXPR nodes. It is incorrect to use
8888 operand_equal_p as that function is NOT equivalent to a
8889 C equality test. It can in fact return false for two
8890 objects which would test as equal using the C equality
8891 operator. */
8892 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8893 return constant_boolean_node (equal
8894 ? code == EQ_EXPR : code != EQ_EXPR,
8895 type);
8896 }
8897
8898 /* If this is a comparison of two exprs that look like an
8899 ARRAY_REF of the same object, then we can fold this to a
8900 comparison of the two offsets. */
8901 if (TREE_CODE_CLASS (code) == tcc_comparison)
8902 {
8903 tree base0, offset0, base1, offset1;
8904
8905 if (extract_array_ref (arg0, &base0, &offset0)
8906 && extract_array_ref (arg1, &base1, &offset1)
8907 && operand_equal_p (base0, base1, 0))
8908 {
8909 /* Handle no offsets on both sides specially. */
8910 if (offset0 == NULL_TREE
8911 && offset1 == NULL_TREE)
8912 return fold_build2 (code, type, integer_zero_node,
8913 integer_zero_node);
8914
8915 if (!offset0 || !offset1
8916 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8917 {
8918 if (offset0 == NULL_TREE)
8919 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8920 if (offset1 == NULL_TREE)
8921 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8922 return fold_build2 (code, type, offset0, offset1);
8923 }
8924 }
8925 }
8926
8927 /* Transform comparisons of the form X +- C CMP X. */
8928 if ((code != EQ_EXPR && code != NE_EXPR)
8929 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8930 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8931 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8932 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8933 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8934 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8935 && !(flag_wrapv || flag_trapv))))
8936 {
8937 tree arg01 = TREE_OPERAND (arg0, 1);
8938 enum tree_code code0 = TREE_CODE (arg0);
8939 int is_positive;
8940
8941 if (TREE_CODE (arg01) == REAL_CST)
8942 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8943 else
8944 is_positive = tree_int_cst_sgn (arg01);
8945
8946 /* (X - c) > X becomes false. */
8947 if (code == GT_EXPR
8948 && ((code0 == MINUS_EXPR && is_positive >= 0)
8949 || (code0 == PLUS_EXPR && is_positive <= 0)))
8950 return constant_boolean_node (0, type);
8951
8952 /* Likewise (X + c) < X becomes false. */
8953 if (code == LT_EXPR
8954 && ((code0 == PLUS_EXPR && is_positive >= 0)
8955 || (code0 == MINUS_EXPR && is_positive <= 0)))
8956 return constant_boolean_node (0, type);
8957
8958 /* Convert (X - c) <= X to true. */
8959 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8960 && code == LE_EXPR
8961 && ((code0 == MINUS_EXPR && is_positive >= 0)
8962 || (code0 == PLUS_EXPR && is_positive <= 0)))
8963 return constant_boolean_node (1, type);
8964
8965 /* Convert (X + c) >= X to true. */
8966 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8967 && code == GE_EXPR
8968 && ((code0 == PLUS_EXPR && is_positive >= 0)
8969 || (code0 == MINUS_EXPR && is_positive <= 0)))
8970 return constant_boolean_node (1, type);
8971
8972 if (TREE_CODE (arg01) == INTEGER_CST)
8973 {
8974 /* Convert X + c > X and X - c < X to true for integers. */
8975 if (code == GT_EXPR
8976 && ((code0 == PLUS_EXPR && is_positive > 0)
8977 || (code0 == MINUS_EXPR && is_positive < 0)))
8978 return constant_boolean_node (1, type);
8979
8980 if (code == LT_EXPR
8981 && ((code0 == MINUS_EXPR && is_positive > 0)
8982 || (code0 == PLUS_EXPR && is_positive < 0)))
8983 return constant_boolean_node (1, type);
8984
8985 /* Convert X + c <= X and X - c >= X to false for integers. */
8986 if (code == LE_EXPR
8987 && ((code0 == PLUS_EXPR && is_positive > 0)
8988 || (code0 == MINUS_EXPR && is_positive < 0)))
8989 return constant_boolean_node (0, type);
8990
8991 if (code == GE_EXPR
8992 && ((code0 == MINUS_EXPR && is_positive > 0)
8993 || (code0 == PLUS_EXPR && is_positive < 0)))
8994 return constant_boolean_node (0, type);
8995 }
8996 }
8997
8998 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8999 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9000 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9001 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9002 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9003 && !(flag_wrapv || flag_trapv))
9004 && (TREE_CODE (arg1) == INTEGER_CST
9005 && !TREE_OVERFLOW (arg1)))
9006 {
9007 tree const1 = TREE_OPERAND (arg0, 1);
9008 tree const2 = arg1;
9009 tree variable = TREE_OPERAND (arg0, 0);
9010 tree lhs;
9011 int lhs_add;
9012 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9013
9014 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9015 TREE_TYPE (arg1), const2, const1);
9016 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9017 && (TREE_CODE (lhs) != INTEGER_CST
9018 || !TREE_OVERFLOW (lhs)))
9019 return fold_build2 (code, type, variable, lhs);
9020 }
9021
9022 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9023 {
9024 tree targ0 = strip_float_extensions (arg0);
9025 tree targ1 = strip_float_extensions (arg1);
9026 tree newtype = TREE_TYPE (targ0);
9027
9028 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9029 newtype = TREE_TYPE (targ1);
9030
9031 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9032 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9033 return fold_build2 (code, type, fold_convert (newtype, targ0),
9034 fold_convert (newtype, targ1));
9035
9036 /* (-a) CMP (-b) -> b CMP a */
9037 if (TREE_CODE (arg0) == NEGATE_EXPR
9038 && TREE_CODE (arg1) == NEGATE_EXPR)
9039 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9040 TREE_OPERAND (arg0, 0));
9041
9042 if (TREE_CODE (arg1) == REAL_CST)
9043 {
9044 REAL_VALUE_TYPE cst;
9045 cst = TREE_REAL_CST (arg1);
9046
9047 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9048 if (TREE_CODE (arg0) == NEGATE_EXPR)
9049 return
9050 fold_build2 (swap_tree_comparison (code), type,
9051 TREE_OPERAND (arg0, 0),
9052 build_real (TREE_TYPE (arg1),
9053 REAL_VALUE_NEGATE (cst)));
9054
9055 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9056 /* a CMP (-0) -> a CMP 0 */
9057 if (REAL_VALUE_MINUS_ZERO (cst))
9058 return fold_build2 (code, type, arg0,
9059 build_real (TREE_TYPE (arg1), dconst0));
9060
9061 /* x != NaN is always true, other ops are always false. */
9062 if (REAL_VALUE_ISNAN (cst)
9063 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9064 {
9065 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9066 return omit_one_operand (type, tem, arg0);
9067 }
9068
9069 /* Fold comparisons against infinity. */
9070 if (REAL_VALUE_ISINF (cst))
9071 {
9072 tem = fold_inf_compare (code, type, arg0, arg1);
9073 if (tem != NULL_TREE)
9074 return tem;
9075 }
9076 }
9077
9078 /* If this is a comparison of a real constant with a PLUS_EXPR
9079 or a MINUS_EXPR of a real constant, we can convert it into a
9080 comparison with a revised real constant as long as no overflow
9081 occurs when unsafe_math_optimizations are enabled. */
9082 if (flag_unsafe_math_optimizations
9083 && TREE_CODE (arg1) == REAL_CST
9084 && (TREE_CODE (arg0) == PLUS_EXPR
9085 || TREE_CODE (arg0) == MINUS_EXPR)
9086 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9087 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9088 ? MINUS_EXPR : PLUS_EXPR,
9089 arg1, TREE_OPERAND (arg0, 1), 0))
9090 && ! TREE_CONSTANT_OVERFLOW (tem))
9091 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9092
9093 /* Likewise, we can simplify a comparison of a real constant with
9094 a MINUS_EXPR whose first operand is also a real constant, i.e.
9095 (c1 - x) < c2 becomes x > c1-c2. */
9096 if (flag_unsafe_math_optimizations
9097 && TREE_CODE (arg1) == REAL_CST
9098 && TREE_CODE (arg0) == MINUS_EXPR
9099 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9100 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9101 arg1, 0))
9102 && ! TREE_CONSTANT_OVERFLOW (tem))
9103 return fold_build2 (swap_tree_comparison (code), type,
9104 TREE_OPERAND (arg0, 1), tem);
9105
9106 /* Fold comparisons against built-in math functions. */
9107 if (TREE_CODE (arg1) == REAL_CST
9108 && flag_unsafe_math_optimizations
9109 && ! flag_errno_math)
9110 {
9111 enum built_in_function fcode = builtin_mathfn_code (arg0);
9112
9113 if (fcode != END_BUILTINS)
9114 {
9115 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9116 if (tem != NULL_TREE)
9117 return tem;
9118 }
9119 }
9120 }
9121
9122 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9123 if (TREE_CONSTANT (arg1)
9124 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9125 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9126 /* This optimization is invalid for ordered comparisons
9127 if CONST+INCR overflows or if foo+incr might overflow.
9128 This optimization is invalid for floating point due to rounding.
9129 For pointer types we assume overflow doesn't happen. */
9130 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9131 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9132 && (code == EQ_EXPR || code == NE_EXPR))))
9133 {
9134 tree varop, newconst;
9135
9136 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9137 {
9138 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9139 arg1, TREE_OPERAND (arg0, 1));
9140 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9141 TREE_OPERAND (arg0, 0),
9142 TREE_OPERAND (arg0, 1));
9143 }
9144 else
9145 {
9146 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9147 arg1, TREE_OPERAND (arg0, 1));
9148 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9149 TREE_OPERAND (arg0, 0),
9150 TREE_OPERAND (arg0, 1));
9151 }
9152
9153
9154 /* If VAROP is a reference to a bitfield, we must mask
9155 the constant by the width of the field. */
9156 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9157 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9158 && host_integerp (DECL_SIZE (TREE_OPERAND
9159 (TREE_OPERAND (varop, 0), 1)), 1))
9160 {
9161 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9162 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9163 tree folded_compare, shift;
9164
9165 /* First check whether the comparison would come out
9166 always the same. If we don't do that we would
9167 change the meaning with the masking. */
9168 folded_compare = fold_build2 (code, type,
9169 TREE_OPERAND (varop, 0), arg1);
9170 if (integer_zerop (folded_compare)
9171 || integer_onep (folded_compare))
9172 return omit_one_operand (type, folded_compare, varop);
9173
9174 shift = build_int_cst (NULL_TREE,
9175 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9176 shift = fold_convert (TREE_TYPE (varop), shift);
9177 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9178 newconst, shift);
9179 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9180 newconst, shift);
9181 }
9182
9183 return fold_build2 (code, type, varop, newconst);
9184 }
9185
9186 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9187 This transformation affects the cases which are handled in later
9188 optimizations involving comparisons with non-negative constants. */
9189 if (TREE_CODE (arg1) == INTEGER_CST
9190 && TREE_CODE (arg0) != INTEGER_CST
9191 && tree_int_cst_sgn (arg1) > 0)
9192 {
9193 switch (code)
9194 {
9195 case GE_EXPR:
9196 arg1 = const_binop (MINUS_EXPR, arg1,
9197 build_int_cst (TREE_TYPE (arg1), 1), 0);
9198 return fold_build2 (GT_EXPR, type, arg0,
9199 fold_convert (TREE_TYPE (arg0), arg1));
9200
9201 case LT_EXPR:
9202 arg1 = const_binop (MINUS_EXPR, arg1,
9203 build_int_cst (TREE_TYPE (arg1), 1), 0);
9204 return fold_build2 (LE_EXPR, type, arg0,
9205 fold_convert (TREE_TYPE (arg0), arg1));
9206
9207 default:
9208 break;
9209 }
9210 }
9211
9212 /* Comparisons with the highest or lowest possible integer of
9213 the specified size will have known values. */
9214 {
9215 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9216
9217 if (TREE_CODE (arg1) == INTEGER_CST
9218 && ! TREE_CONSTANT_OVERFLOW (arg1)
9219 && width <= 2 * HOST_BITS_PER_WIDE_INT
9220 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9221 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9222 {
9223 HOST_WIDE_INT signed_max_hi;
9224 unsigned HOST_WIDE_INT signed_max_lo;
9225 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9226
9227 if (width <= HOST_BITS_PER_WIDE_INT)
9228 {
9229 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9230 - 1;
9231 signed_max_hi = 0;
9232 max_hi = 0;
9233
9234 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9235 {
9236 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9237 min_lo = 0;
9238 min_hi = 0;
9239 }
9240 else
9241 {
9242 max_lo = signed_max_lo;
9243 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9244 min_hi = -1;
9245 }
9246 }
9247 else
9248 {
9249 width -= HOST_BITS_PER_WIDE_INT;
9250 signed_max_lo = -1;
9251 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9252 - 1;
9253 max_lo = -1;
9254 min_lo = 0;
9255
9256 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9257 {
9258 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9259 min_hi = 0;
9260 }
9261 else
9262 {
9263 max_hi = signed_max_hi;
9264 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9265 }
9266 }
9267
9268 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9269 && TREE_INT_CST_LOW (arg1) == max_lo)
9270 switch (code)
9271 {
9272 case GT_EXPR:
9273 return omit_one_operand (type, integer_zero_node, arg0);
9274
9275 case GE_EXPR:
9276 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9277
9278 case LE_EXPR:
9279 return omit_one_operand (type, integer_one_node, arg0);
9280
9281 case LT_EXPR:
9282 return fold_build2 (NE_EXPR, type, arg0, arg1);
9283
9284 /* The GE_EXPR and LT_EXPR cases above are not normally
9285 reached because of previous transformations. */
9286
9287 default:
9288 break;
9289 }
9290 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9291 == max_hi
9292 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9293 switch (code)
9294 {
9295 case GT_EXPR:
9296 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9297 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9298 case LE_EXPR:
9299 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9300 return fold_build2 (NE_EXPR, type, arg0, arg1);
9301 default:
9302 break;
9303 }
9304 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9305 == min_hi
9306 && TREE_INT_CST_LOW (arg1) == min_lo)
9307 switch (code)
9308 {
9309 case LT_EXPR:
9310 return omit_one_operand (type, integer_zero_node, arg0);
9311
9312 case LE_EXPR:
9313 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9314
9315 case GE_EXPR:
9316 return omit_one_operand (type, integer_one_node, arg0);
9317
9318 case GT_EXPR:
9319 return fold_build2 (NE_EXPR, type, op0, op1);
9320
9321 default:
9322 break;
9323 }
9324 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9325 == min_hi
9326 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9327 switch (code)
9328 {
9329 case GE_EXPR:
9330 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9331 return fold_build2 (NE_EXPR, type, arg0, arg1);
9332 case LT_EXPR:
9333 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9334 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9335 default:
9336 break;
9337 }
9338
9339 else if (!in_gimple_form
9340 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9341 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9342 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9343 /* signed_type does not work on pointer types. */
9344 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9345 {
9346 /* The following case also applies to X < signed_max+1
9347 and X >= signed_max+1 because previous transformations. */
9348 if (code == LE_EXPR || code == GT_EXPR)
9349 {
9350 tree st0, st1;
9351 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9352 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9353 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9354 type, fold_convert (st0, arg0),
9355 build_int_cst (st1, 0));
9356 }
9357 }
9358 }
9359 }
9360
9361 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9362 a MINUS_EXPR of a constant, we can convert it into a comparison with
9363 a revised constant as long as no overflow occurs. */
9364 if ((code == EQ_EXPR || code == NE_EXPR)
9365 && TREE_CODE (arg1) == INTEGER_CST
9366 && (TREE_CODE (arg0) == PLUS_EXPR
9367 || TREE_CODE (arg0) == MINUS_EXPR)
9368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9369 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9370 ? MINUS_EXPR : PLUS_EXPR,
9371 arg1, TREE_OPERAND (arg0, 1), 0))
9372 && ! TREE_CONSTANT_OVERFLOW (tem))
9373 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9374
9375 /* Similarly for a NEGATE_EXPR. */
9376 else if ((code == EQ_EXPR || code == NE_EXPR)
9377 && TREE_CODE (arg0) == NEGATE_EXPR
9378 && TREE_CODE (arg1) == INTEGER_CST
9379 && 0 != (tem = negate_expr (arg1))
9380 && TREE_CODE (tem) == INTEGER_CST
9381 && ! TREE_CONSTANT_OVERFLOW (tem))
9382 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9383
9384 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9385 for !=. Don't do this for ordered comparisons due to overflow. */
9386 else if ((code == NE_EXPR || code == EQ_EXPR)
9387 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9388 return fold_build2 (code, type,
9389 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9390
9391 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9392 && (TREE_CODE (arg0) == NOP_EXPR
9393 || TREE_CODE (arg0) == CONVERT_EXPR))
9394 {
9395 /* If we are widening one operand of an integer comparison,
9396 see if the other operand is similarly being widened. Perhaps we
9397 can do the comparison in the narrower type. */
9398 tem = fold_widened_comparison (code, type, arg0, arg1);
9399 if (tem)
9400 return tem;
9401
9402 /* Or if we are changing signedness. */
9403 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9404 if (tem)
9405 return tem;
9406 }
9407
9408 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9409 constant, we can simplify it. */
9410 else if (TREE_CODE (arg1) == INTEGER_CST
9411 && (TREE_CODE (arg0) == MIN_EXPR
9412 || TREE_CODE (arg0) == MAX_EXPR)
9413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9414 {
9415 tem = optimize_minmax_comparison (code, type, op0, op1);
9416 if (tem)
9417 return tem;
9418
9419 return NULL_TREE;
9420 }
9421
9422 /* If we are comparing an ABS_EXPR with a constant, we can
9423 convert all the cases into explicit comparisons, but they may
9424 well not be faster than doing the ABS and one comparison.
9425 But ABS (X) <= C is a range comparison, which becomes a subtraction
9426 and a comparison, and is probably faster. */
9427 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9428 && TREE_CODE (arg0) == ABS_EXPR
9429 && ! TREE_SIDE_EFFECTS (arg0)
9430 && (0 != (tem = negate_expr (arg1)))
9431 && TREE_CODE (tem) == INTEGER_CST
9432 && ! TREE_CONSTANT_OVERFLOW (tem))
9433 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9434 build2 (GE_EXPR, type,
9435 TREE_OPERAND (arg0, 0), tem),
9436 build2 (LE_EXPR, type,
9437 TREE_OPERAND (arg0, 0), arg1));
9438
9439 /* Convert ABS_EXPR<x> >= 0 to true. */
9440 else if (code == GE_EXPR
9441 && tree_expr_nonnegative_p (arg0)
9442 && (integer_zerop (arg1)
9443 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9444 && real_zerop (arg1))))
9445 return omit_one_operand (type, integer_one_node, arg0);
9446
9447 /* Convert ABS_EXPR<x> < 0 to false. */
9448 else if (code == LT_EXPR
9449 && tree_expr_nonnegative_p (arg0)
9450 && (integer_zerop (arg1) || real_zerop (arg1)))
9451 return omit_one_operand (type, integer_zero_node, arg0);
9452
9453 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9454 else if ((code == EQ_EXPR || code == NE_EXPR)
9455 && TREE_CODE (arg0) == ABS_EXPR
9456 && (integer_zerop (arg1) || real_zerop (arg1)))
9457 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9458
9459 /* If this is an EQ or NE comparison with zero and ARG0 is
9460 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9461 two operations, but the latter can be done in one less insn
9462 on machines that have only two-operand insns or on which a
9463 constant cannot be the first operand. */
9464 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9465 && TREE_CODE (arg0) == BIT_AND_EXPR)
9466 {
9467 tree arg00 = TREE_OPERAND (arg0, 0);
9468 tree arg01 = TREE_OPERAND (arg0, 1);
9469 if (TREE_CODE (arg00) == LSHIFT_EXPR
9470 && integer_onep (TREE_OPERAND (arg00, 0)))
9471 return
9472 fold_build2 (code, type,
9473 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9474 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9475 arg01, TREE_OPERAND (arg00, 1)),
9476 fold_convert (TREE_TYPE (arg0),
9477 integer_one_node)),
9478 arg1);
9479 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9480 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9481 return
9482 fold_build2 (code, type,
9483 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9484 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9485 arg00, TREE_OPERAND (arg01, 1)),
9486 fold_convert (TREE_TYPE (arg0),
9487 integer_one_node)),
9488 arg1);
9489 }
9490
9491 /* If this is an NE or EQ comparison of zero against the result of a
9492 signed MOD operation whose second operand is a power of 2, make
9493 the MOD operation unsigned since it is simpler and equivalent. */
9494 if ((code == NE_EXPR || code == EQ_EXPR)
9495 && integer_zerop (arg1)
9496 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9497 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9498 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9499 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9500 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9501 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9502 {
9503 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9504 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9505 fold_convert (newtype,
9506 TREE_OPERAND (arg0, 0)),
9507 fold_convert (newtype,
9508 TREE_OPERAND (arg0, 1)));
9509
9510 return fold_build2 (code, type, newmod,
9511 fold_convert (newtype, arg1));
9512 }
9513
9514 /* If this is an NE comparison of zero with an AND of one, remove the
9515 comparison since the AND will give the correct value. */
9516 if (code == NE_EXPR && integer_zerop (arg1)
9517 && TREE_CODE (arg0) == BIT_AND_EXPR
9518 && integer_onep (TREE_OPERAND (arg0, 1)))
9519 return fold_convert (type, arg0);
9520
9521 /* If we have (A & C) == C where C is a power of 2, convert this into
9522 (A & C) != 0. Similarly for NE_EXPR. */
9523 if ((code == EQ_EXPR || code == NE_EXPR)
9524 && TREE_CODE (arg0) == BIT_AND_EXPR
9525 && integer_pow2p (TREE_OPERAND (arg0, 1))
9526 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9527 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9528 arg0, fold_convert (TREE_TYPE (arg0),
9529 integer_zero_node));
9530
9531 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9532 bit, then fold the expression into A < 0 or A >= 0. */
9533 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9534 if (tem)
9535 return tem;
9536
9537 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9538 Similarly for NE_EXPR. */
9539 if ((code == EQ_EXPR || code == NE_EXPR)
9540 && TREE_CODE (arg0) == BIT_AND_EXPR
9541 && TREE_CODE (arg1) == INTEGER_CST
9542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9543 {
9544 tree notc = fold_build1 (BIT_NOT_EXPR,
9545 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9546 TREE_OPERAND (arg0, 1));
9547 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9548 arg1, notc);
9549 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9550 if (integer_nonzerop (dandnotc))
9551 return omit_one_operand (type, rslt, arg0);
9552 }
9553
9554 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9555 Similarly for NE_EXPR. */
9556 if ((code == EQ_EXPR || code == NE_EXPR)
9557 && TREE_CODE (arg0) == BIT_IOR_EXPR
9558 && TREE_CODE (arg1) == INTEGER_CST
9559 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9560 {
9561 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9562 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9563 TREE_OPERAND (arg0, 1), notd);
9564 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9565 if (integer_nonzerop (candnotd))
9566 return omit_one_operand (type, rslt, arg0);
9567 }
9568
9569 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9570 and similarly for >= into !=. */
9571 if ((code == LT_EXPR || code == GE_EXPR)
9572 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9573 && TREE_CODE (arg1) == LSHIFT_EXPR
9574 && integer_onep (TREE_OPERAND (arg1, 0)))
9575 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9576 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9577 TREE_OPERAND (arg1, 1)),
9578 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9579
9580 else if ((code == LT_EXPR || code == GE_EXPR)
9581 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9582 && (TREE_CODE (arg1) == NOP_EXPR
9583 || TREE_CODE (arg1) == CONVERT_EXPR)
9584 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9585 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9586 return
9587 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9588 fold_convert (TREE_TYPE (arg0),
9589 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9590 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9591 1))),
9592 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9593
9594 /* Simplify comparison of something with itself. (For IEEE
9595 floating-point, we can only do some of these simplifications.) */
9596 if (operand_equal_p (arg0, arg1, 0))
9597 {
9598 switch (code)
9599 {
9600 case EQ_EXPR:
9601 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9602 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9603 return constant_boolean_node (1, type);
9604 break;
9605
9606 case GE_EXPR:
9607 case LE_EXPR:
9608 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9609 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9610 return constant_boolean_node (1, type);
9611 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9612
9613 case NE_EXPR:
9614 /* For NE, we can only do this simplification if integer
9615 or we don't honor IEEE floating point NaNs. */
9616 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9617 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9618 break;
9619 /* ... fall through ... */
9620 case GT_EXPR:
9621 case LT_EXPR:
9622 return constant_boolean_node (0, type);
9623 default:
9624 gcc_unreachable ();
9625 }
9626 }
9627
9628 /* If we are comparing an expression that just has comparisons
9629 of two integer values, arithmetic expressions of those comparisons,
9630 and constants, we can simplify it. There are only three cases
9631 to check: the two values can either be equal, the first can be
9632 greater, or the second can be greater. Fold the expression for
9633 those three values. Since each value must be 0 or 1, we have
9634 eight possibilities, each of which corresponds to the constant 0
9635 or 1 or one of the six possible comparisons.
9636
9637 This handles common cases like (a > b) == 0 but also handles
9638 expressions like ((x > y) - (y > x)) > 0, which supposedly
9639 occur in macroized code. */
9640
9641 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9642 {
9643 tree cval1 = 0, cval2 = 0;
9644 int save_p = 0;
9645
9646 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9647 /* Don't handle degenerate cases here; they should already
9648 have been handled anyway. */
9649 && cval1 != 0 && cval2 != 0
9650 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9651 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9652 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9653 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9654 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9655 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9656 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9657 {
9658 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9659 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9660
9661 /* We can't just pass T to eval_subst in case cval1 or cval2
9662 was the same as ARG1. */
9663
9664 tree high_result
9665 = fold_build2 (code, type,
9666 eval_subst (arg0, cval1, maxval,
9667 cval2, minval),
9668 arg1);
9669 tree equal_result
9670 = fold_build2 (code, type,
9671 eval_subst (arg0, cval1, maxval,
9672 cval2, maxval),
9673 arg1);
9674 tree low_result
9675 = fold_build2 (code, type,
9676 eval_subst (arg0, cval1, minval,
9677 cval2, maxval),
9678 arg1);
9679
9680 /* All three of these results should be 0 or 1. Confirm they
9681 are. Then use those values to select the proper code
9682 to use. */
9683
9684 if ((integer_zerop (high_result)
9685 || integer_onep (high_result))
9686 && (integer_zerop (equal_result)
9687 || integer_onep (equal_result))
9688 && (integer_zerop (low_result)
9689 || integer_onep (low_result)))
9690 {
9691 /* Make a 3-bit mask with the high-order bit being the
9692 value for `>', the next for '=', and the low for '<'. */
9693 switch ((integer_onep (high_result) * 4)
9694 + (integer_onep (equal_result) * 2)
9695 + integer_onep (low_result))
9696 {
9697 case 0:
9698 /* Always false. */
9699 return omit_one_operand (type, integer_zero_node, arg0);
9700 case 1:
9701 code = LT_EXPR;
9702 break;
9703 case 2:
9704 code = EQ_EXPR;
9705 break;
9706 case 3:
9707 code = LE_EXPR;
9708 break;
9709 case 4:
9710 code = GT_EXPR;
9711 break;
9712 case 5:
9713 code = NE_EXPR;
9714 break;
9715 case 6:
9716 code = GE_EXPR;
9717 break;
9718 case 7:
9719 /* Always true. */
9720 return omit_one_operand (type, integer_one_node, arg0);
9721 }
9722
9723 if (save_p)
9724 return save_expr (build2 (code, type, cval1, cval2));
9725 else
9726 return fold_build2 (code, type, cval1, cval2);
9727 }
9728 }
9729 }
9730
9731 /* If this is a comparison of a field, we may be able to simplify it. */
9732 if (((TREE_CODE (arg0) == COMPONENT_REF
9733 && lang_hooks.can_use_bit_fields_p ())
9734 || TREE_CODE (arg0) == BIT_FIELD_REF)
9735 && (code == EQ_EXPR || code == NE_EXPR)
9736 /* Handle the constant case even without -O
9737 to make sure the warnings are given. */
9738 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9739 {
9740 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9741 if (t1)
9742 return t1;
9743 }
9744
9745 /* Fold a comparison of the address of COMPONENT_REFs with the same
9746 type and component to a comparison of the address of the base
9747 object. In short, &x->a OP &y->a to x OP y and
9748 &x->a OP &y.a to x OP &y */
9749 if (TREE_CODE (arg0) == ADDR_EXPR
9750 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9751 && TREE_CODE (arg1) == ADDR_EXPR
9752 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9753 {
9754 tree cref0 = TREE_OPERAND (arg0, 0);
9755 tree cref1 = TREE_OPERAND (arg1, 0);
9756 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9757 {
9758 tree op0 = TREE_OPERAND (cref0, 0);
9759 tree op1 = TREE_OPERAND (cref1, 0);
9760 return fold_build2 (code, type,
9761 build_fold_addr_expr (op0),
9762 build_fold_addr_expr (op1));
9763 }
9764 }
9765
9766 /* Optimize comparisons of strlen vs zero to a compare of the
9767 first character of the string vs zero. To wit,
9768 strlen(ptr) == 0 => *ptr == 0
9769 strlen(ptr) != 0 => *ptr != 0
9770 Other cases should reduce to one of these two (or a constant)
9771 due to the return value of strlen being unsigned. */
9772 if ((code == EQ_EXPR || code == NE_EXPR)
9773 && integer_zerop (arg1)
9774 && TREE_CODE (arg0) == CALL_EXPR)
9775 {
9776 tree fndecl = get_callee_fndecl (arg0);
9777 tree arglist;
9778
9779 if (fndecl
9780 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9781 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9782 && (arglist = TREE_OPERAND (arg0, 1))
9783 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9784 && ! TREE_CHAIN (arglist))
9785 {
9786 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9787 return fold_build2 (code, type, iref,
9788 build_int_cst (TREE_TYPE (iref), 0));
9789 }
9790 }
9791
9792 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9793 into a single range test. */
9794 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9795 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9796 && TREE_CODE (arg1) == INTEGER_CST
9797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9798 && !integer_zerop (TREE_OPERAND (arg0, 1))
9799 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9800 && !TREE_OVERFLOW (arg1))
9801 {
9802 t1 = fold_div_compare (code, type, arg0, arg1);
9803 if (t1 != NULL_TREE)
9804 return t1;
9805 }
9806
9807 if ((code == EQ_EXPR || code == NE_EXPR)
9808 && integer_zerop (arg1)
9809 && tree_expr_nonzero_p (arg0))
9810 {
9811 tree res = constant_boolean_node (code==NE_EXPR, type);
9812 return omit_one_operand (type, res, arg0);
9813 }
9814
9815 t1 = fold_relational_const (code, type, arg0, arg1);
9816 return t1 == NULL_TREE ? NULL_TREE : t1;
9817
UNORDERED_EXPR, ORDERED_EXPR, UNLT_EXPR, UNLE_EXPR, UNGT_EXPR, UNGE_EXPR, UNEQ_EXPR, LTGT_EXPR[]
9818 case UNORDERED_EXPR:
9819 case ORDERED_EXPR:
9820 case UNLT_EXPR:
9821 case UNLE_EXPR:
9822 case UNGT_EXPR:
9823 case UNGE_EXPR:
9824 case UNEQ_EXPR:
9825 case LTGT_EXPR:
9826 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9827 {
9828 t1 = fold_relational_const (code, type, arg0, arg1);
9829 if (t1 != NULL_TREE)
9830 return t1;
9831 }
9832
9833 /* If the first operand is NaN, the result is constant. */
9834 if (TREE_CODE (arg0) == REAL_CST
9835 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9836 && (code != LTGT_EXPR || ! flag_trapping_math))
9837 {
9838 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9839 ? integer_zero_node
9840 : integer_one_node;
9841 return omit_one_operand (type, t1, arg1);
9842 }
9843
9844 /* If the second operand is NaN, the result is constant. */
9845 if (TREE_CODE (arg1) == REAL_CST
9846 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9847 && (code != LTGT_EXPR || ! flag_trapping_math))
9848 {
9849 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9850 ? integer_zero_node
9851 : integer_one_node;
9852 return omit_one_operand (type, t1, arg0);
9853 }
9854
9855 /* Simplify unordered comparison of something with itself. */
9856 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9857 && operand_equal_p (arg0, arg1, 0))
9858 return constant_boolean_node (1, type);
9859
9860 if (code == LTGT_EXPR
9861 && !flag_trapping_math
9862 && operand_equal_p (arg0, arg1, 0))
9863 return constant_boolean_node (0, type);
9864
9865 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9866 {
9867 tree targ0 = strip_float_extensions (arg0);
9868 tree targ1 = strip_float_extensions (arg1);
9869 tree newtype = TREE_TYPE (targ0);
9870
9871 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9872 newtype = TREE_TYPE (targ1);
9873
9874 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9875 return fold_build2 (code, type, fold_convert (newtype, targ0),
9876 fold_convert (newtype, targ1));
9877 }
9878
9879 return NULL_TREE;
9880
COMPOUND_EXPR[]
9881 case COMPOUND_EXPR:
9882 /* When pedantic, a compound expression can be neither an lvalue
9883 nor an integer constant expression. */
9884 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9885 return NULL_TREE;
9886 /* Don't let (0, 0) be null pointer constant. */
9887 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9888 : fold_convert (type, arg1);
9889 return pedantic_non_lvalue (tem);
9890
COMPLEX_EXPR[]
9891 case COMPLEX_EXPR:
9892 if (wins)
9893 return build_complex (type, arg0, arg1);
9894 return NULL_TREE;
9895
ASSERT_EXPR[]
9896 case ASSERT_EXPR:
9897 /* An ASSERT_EXPR should never be passed to fold_binary. */
9898 gcc_unreachable ();
9899
9900 default:
9901 return NULL_TREE;
9902 } /* switch (code) */
9903 }