diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index c3d66340c4..d028d4467e 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -2401,8 +2401,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - select primitive fields") {
-    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
-    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2420,8 +2418,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - select subset of struct") {
-    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
-    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2449,8 +2445,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - read entire struct") {
-    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
-    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
diff --git a/spark/src/test/scala/org/apache/comet/CometMathExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometMathExpressionSuite.scala
index 8698964ce6..9d27f2d25f 100644
--- a/spark/src/test/scala/org/apache/comet/CometMathExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometMathExpressionSuite.scala
@@ -92,8 +92,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
         DataGenOptions(generateNegativeZero = generateNegativeZero))
   }
 
-  // https://github.com/apache/datafusion-comet/issues/3561
-  ignore("width_bucket") {
+  test("width_bucket") {
     assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
     withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
       spark
@@ -106,8 +105,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
     }
   }
 
-  // https://github.com/apache/datafusion-comet/issues/3561
-  ignore("width_bucket - edge cases") {
+  test("width_bucket - edge cases") {
     assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
     withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
       spark
@@ -124,8 +122,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
     }
   }
 
-  // https://github.com/apache/datafusion-comet/issues/3561
-  ignore("width_bucket - NaN values") {
+  test("width_bucket - NaN values") {
     assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
     withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
       spark
@@ -137,8 +134,7 @@ class CometMathExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelpe
     }
   }
 
-  // https://github.com/apache/datafusion-comet/issues/3561
-  ignore("width_bucket - with range data") {
+  test("width_bucket - with range data") {
     assume(isSpark35Plus, "width_bucket was added in Spark 3.5")
     withSQLConf("spark.comet.exec.localTableScan.enabled" -> "true") {
       spark