@@ -4127,7 +4127,37 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
     GenTree*       op1 = cmp->gtGetOp1();
     GenTreeIntCon* op2 = cmp->gtGetOp2()->AsIntCon();

-#if defined(TARGET_XARCH) || defined(TARGET_ARM64)
+#if defined(TARGET_XARCH) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
+
+    // If 'test' is a single bit test, leaves the tested expr in the left op, the bit index in the right op, and
+    // returns true. Otherwise, returns false.
+    auto tryReduceSingleBitTestOps = [this](GenTreeOp* test) -> bool {
+        assert(test->OperIs(GT_AND, GT_TEST_EQ, GT_TEST_NE));
+        GenTree* testedOp = test->gtOp1;
+        GenTree* bitOp    = test->gtOp2;
+#ifdef TARGET_RISCV64
+        if (bitOp->IsIntegralConstUnsignedPow2())
+        {
+            INT64 bit  = bitOp->AsIntConCommon()->IntegralValue();
+            int   log2 = BitOperations::Log2((UINT64)bit);
+            bitOp->AsIntConCommon()->SetIntegralValue(log2);
+            return true;
+        }
+#endif
+        if (!bitOp->OperIs(GT_LSH))
+            std::swap(bitOp, testedOp);
+
+        if (bitOp->OperIs(GT_LSH) && varTypeIsIntOrI(bitOp) && bitOp->gtGetOp1()->IsIntegralConst(1))
+        {
+            BlockRange().Remove(bitOp->gtGetOp1());
+            BlockRange().Remove(bitOp);
+            test->gtOp1 = testedOp;
+            test->gtOp2 = bitOp->gtGetOp2();
+            return true;
+        }
+        return false;
+    };
+
     ssize_t op2Value = op2->IconValue();

 #ifdef TARGET_XARCH
@@ -4165,6 +4195,8 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
             bool removeCast =
 #ifdef TARGET_ARM64
                 (op2Value == 0) && cmp->OperIs(GT_EQ, GT_NE, GT_GT) && !castOp->isContained() &&
+#elif defined(TARGET_RISCV64)
+                false && // disable, comparisons and bit operations are full-register only
 #endif
                 (castOp->OperIs(GT_LCL_VAR, GT_CALL, GT_OR, GT_XOR, GT_AND)
 #ifdef TARGET_XARCH
@@ -4222,6 +4254,52 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
             cmp->SetOperRaw(GenTree::ReverseRelop(cmp->OperGet()));
         }

+#ifdef TARGET_RISCV64
+        if (op2Value == 0 && !andOp2->isContained() && tryReduceSingleBitTestOps(op1->AsOp()))
+        {
+            GenTree* testedOp   = op1->gtGetOp1();
+            GenTree* bitIndexOp = op1->gtGetOp2();
+
+            if (bitIndexOp->IsIntegralConst())
+            {
+                // Shift the tested bit into the sign bit, then check if negative/positive.
+                // Work on whole registers because comparisons and compressed shifts are full-register only.
+                INT64 bitIndex     = bitIndexOp->AsIntConCommon()->IntegralValue();
+                INT64 signBitIndex = genTypeSize(TYP_I_IMPL) * 8 - 1;
+                if (bitIndex < signBitIndex)
+                {
+                    bitIndexOp->AsIntConCommon()->SetIntegralValue(signBitIndex - bitIndex);
+                    bitIndexOp->SetContained();
+                    op1->SetOperRaw(GT_LSH);
+                    op1->gtType = TYP_I_IMPL;
+                }
+                else
+                {
+                    // The tested bit is the sign bit, remove "AND bitIndex" and only check if negative/positive
+                    assert(bitIndex == signBitIndex);
+                    assert(genActualType(testedOp) == TYP_I_IMPL);
+                    BlockRange().Remove(bitIndexOp);
+                    BlockRange().Remove(op1);
+                    cmp->AsOp()->gtOp1 = testedOp;
+                }
+
+                op2->gtType = TYP_I_IMPL;
+                cmp->SetOperRaw(cmp->OperIs(GT_NE) ? GT_LT : GT_GE);
+                cmp->ClearUnsigned();
+
+                return cmp;
+            }
+
+            // Shift the tested bit into the lowest bit, then AND with 1.
+            // The "EQ|NE 0" comparison is folded below as necessary.
+            var_types type     = genActualType(testedOp);
+            op1->AsOp()->gtOp1 = andOp1 = comp->gtNewOperNode(GT_RSH, type, testedOp, bitIndexOp);
+            op1->AsOp()->gtOp2 = andOp2 = comp->gtNewIconNode(1, type);
+            BlockRange().InsertBefore(op1, andOp1, andOp2);
+            andOp2->SetContained();
+        }
+#endif // TARGET_RISCV64
+
         // Optimizes (X & 1) != 0 to (X & 1)
         // Optimizes (X & 1) == 0 to ((NOT X) & 1)
         // (== 1 or != 1) cases are transformed to (!= 0 or == 0) above
@@ -4257,6 +4335,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)

         if (op2Value == 0)
         {
+#ifndef TARGET_RISCV64
             BlockRange().Remove(op1);
             BlockRange().Remove(op2);

@@ -4300,6 +4379,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
                 }
             }
 #endif
+#endif // !TARGET_RISCV64
         }
         else if (andOp2->IsIntegralConst() && GenTree::Compare(andOp2, op2))
         {
@@ -4328,31 +4408,15 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
         // Note that BT has the same behavior as LSH when the bit index exceeds the
         // operand bit size - it uses (bit_index MOD bit_size).
         //
-
-        GenTree* lsh = cmp->AsOp()->gtOp1;
-        GenTree* op  = cmp->AsOp()->gtOp2;
-
-        if (!lsh->OperIs(GT_LSH))
-        {
-            std::swap(lsh, op);
-        }
-
-        if (lsh->OperIs(GT_LSH) && varTypeIsIntOrI(lsh) && lsh->gtGetOp1()->IsIntegralConst(1))
+        if (tryReduceSingleBitTestOps(cmp->AsOp()))
         {
             cmp->SetOper(cmp->OperIs(GT_TEST_EQ) ? GT_BITTEST_EQ : GT_BITTEST_NE);
-
-            BlockRange().Remove(lsh->gtGetOp1());
-            BlockRange().Remove(lsh);
-
-            cmp->AsOp()->gtOp1 = op;
-            cmp->AsOp()->gtOp2 = lsh->gtGetOp2();
             cmp->gtGetOp2()->ClearContained();
-
             return cmp->gtNext;
         }
     }
 #endif // TARGET_XARCH
-#endif // defined(TARGET_XARCH) || defined(TARGET_ARM64)
+#endif // defined(TARGET_XARCH) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)

     // Optimize EQ/NE(relop/SETCC, 0) into (maybe reversed) cond.
     if (cmp->OperIs(GT_EQ, GT_NE) && op2->IsIntegralConst(0) && (op1->OperIsCompare() || op1->OperIs(GT_SETCC)))
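The two RISC-V paths added above rest on plain scalar identities: testing bit N of x is the same as shifting that bit into the sign bit and checking the sign, or shifting it into the lowest bit and masking with 1. Below is a minimal standalone C++ sketch of those identities, for illustration only; it is not part of the change, and the names bitTestViaSignBit, bitTestViaLowestBit, and the sample values are made up here.

#include <cassert>
#include <cstdint>

// (x & (1 << bitIndex)) != 0, rewritten as a sign test: shift the tested
// bit into bit 63, then check whether the signed result is negative.
bool bitTestViaSignBit(uint64_t x, unsigned bitIndex)
{
    const unsigned signBitIndex = 63;
    assert(bitIndex <= signBitIndex);
    return static_cast<int64_t>(x << (signBitIndex - bitIndex)) < 0;
}

// The same test via the lowest bit: shift the tested bit down, then AND with 1.
bool bitTestViaLowestBit(uint64_t x, unsigned bitIndex)
{
    return ((x >> bitIndex) & 1) != 0;
}

int main()
{
    const uint64_t x = 0xA5A5A5A5A5A5A5A5ull; // arbitrary sample value
    for (unsigned i = 0; i < 64; i++)
    {
        bool expected = (x & (uint64_t(1) << i)) != 0;
        assert(bitTestViaSignBit(x, i) == expected);
        assert(bitTestViaLowestBit(x, i) == expected);
    }
    return 0;
}

On RV64 the first form presumably maps to a single slli followed by a signed compare against zero, and the second to srli plus andi, which lines up with the patch's comments about comparisons and compressed shifts operating on full registers only.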