From e4dd93814a893bd606276cffa9ac9c84a2282246 Mon Sep 17 00:00:00 2001 From: huanglong Date: Fri, 11 Oct 2024 10:29:06 +0800 Subject: [PATCH] Avoid redundant ColumnarProjectExec by folding simple (alias-only) projections into ColumnarFileSourceScanExec Signed-off-by: huanglong --- .../com/huawei/boostkit/spark/ColumnarPlugin.scala | 12 ++++++++++++ .../boostkit/spark/hive/HiveResourceSuite.scala | 2 -- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/ColumnarPlugin.scala b/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/ColumnarPlugin.scala index d19d1a467..d840e239c 100644 --- a/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/ColumnarPlugin.scala +++ b/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/ColumnarPlugin.scala @@ -145,6 +145,18 @@ case class ColumnarPreOverrides(isSupportAdaptive: Boolean = true) } else { ColumnarProjectExec(plan.projectList, child) } + case scan: ColumnarFileSourceScanExec if (plan.projectList.forall(project => OmniExpressionAdaptor.isSimpleProjectForAll(project))) => + ColumnarFileSourceScanExec( + scan.relation, + plan.output, + scan.requiredSchema, + scan.partitionFilters, + scan.optionalBucketSet, + scan.optionalNumCoalescedBuckets, + scan.dataFilters, + scan.tableIdentifier, + scan.disableBucketedScan + ) + case _ => ColumnarProjectExec(plan.projectList, child) } diff --git a/omnioperator/omniop-spark-extension/java/src/test/scala/com/huawei/boostkit/spark/hive/HiveResourceSuite.scala b/omnioperator/omniop-spark-extension/java/src/test/scala/com/huawei/boostkit/spark/hive/HiveResourceSuite.scala index ec03b2753..b4620a716 100644 --- a/omnioperator/omniop-spark-extension/java/src/test/scala/com/huawei/boostkit/spark/hive/HiveResourceSuite.scala +++ b/omnioperator/omniop-spark-extension/java/src/test/scala/com/huawei/boostkit/spark/hive/HiveResourceSuite.scala @@ -56,7 +56,6 @@ class HiveResourceSuite extends SparkFunSuite {
test("queryBySparkSql-HiveDataSource") { runner.runQuery("q1", 1) - runner.runQuery("q2", 1) runner.runQuery("q3", 1) runner.runQuery("q4", 1) runner.runQuery("q5", 1) @@ -159,7 +158,6 @@ class HiveResourceSuite extends SparkFunSuite { runner.runQuery("q98", 1) runner.runQuery("q99", 1) } - def readConf(properties: Properties): SparkConf = { val conf = new SparkConf() val wholeStage = properties.getProperty("spark.sql.codegen.wholeStage") -- Gitee