diff --git a/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/expression/OmniExpressionAdaptor.scala b/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/expression/OmniExpressionAdaptor.scala
index 170393144eddc51db16bf41980cd1d7377c0cca9..a509af61ad2e786c4dbc28101e592c97c7d4de3d 100644
--- a/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/expression/OmniExpressionAdaptor.scala
+++ b/omnioperator/omniop-spark-extension/java/src/main/scala/com/huawei/boostkit/spark/expression/OmniExpressionAdaptor.scala
@@ -300,9 +300,10 @@ object OmniExpressionAdaptor extends Logging {
   private def unsupportedCastCheck(expr: Expression, cast: Cast): Unit = {
     def isDecimalOrStringType(dataType: DataType): Boolean = (dataType.isInstanceOf[DecimalType]) ||
       (dataType.isInstanceOf[StringType])
-    // not support Cast(string as !(decimal/string)) and Cast(!(decimal/string) as string)
+    // not support Cast(!(decimal/string) as string) and Cast(string as !(decimal/string/date))
     if ((cast.dataType.isInstanceOf[StringType] && !isDecimalOrStringType(cast.child.dataType)) ||
-      (!isDecimalOrStringType(cast.dataType) && cast.child.dataType.isInstanceOf[StringType])) {
+      (!isDecimalOrStringType(cast.dataType) && !cast.dataType.isInstanceOf[DateType]
+        && cast.child.dataType.isInstanceOf[StringType])) {
       throw new UnsupportedOperationException(s"Unsupported expression: $expr")
     }
   }
@@ -500,6 +501,11 @@ object OmniExpressionAdaptor extends Logging {
         .format(sparkTypeToOmniExpJsonType(lower.dataType),
           rewriteToOmniJsonExpressionLiteral(lower.child, exprsIndexMap))
 
+      case upper: Upper =>
+        "{\"exprType\":\"FUNCTION\",\"returnType\":%s,\"function_name\":\"upper\", \"arguments\":[%s]}"
+          .format(sparkTypeToOmniExpJsonType(upper.dataType),
+            rewriteToOmniJsonExpressionLiteral(upper.child, exprsIndexMap))
+
       case length: Length =>
         "{\"exprType\":\"FUNCTION\",\"returnType\":%s,\"function_name\":\"length\", \"arguments\":[%s]}"
          .format(sparkTypeToOmniExpJsonType(length.dataType),
diff --git a/omnioperator/omniop-spark-extension/java/src/test/scala/org/apache/spark/sql/execution/forsql/ColumnarBuiltInFuncSuite.scala b/omnioperator/omniop-spark-extension/java/src/test/scala/org/apache/spark/sql/execution/forsql/ColumnarBuiltInFuncSuite.scala
index ce3e7ab8576a47a40e7a434847547b0978824043..cb2c419ccc1ae9224825fc878a4ebec7f8a3e666 100644
--- a/omnioperator/omniop-spark-extension/java/src/test/scala/org/apache/spark/sql/execution/forsql/ColumnarBuiltInFuncSuite.scala
+++ b/omnioperator/omniop-spark-extension/java/src/test/scala/org/apache/spark/sql/execution/forsql/ColumnarBuiltInFuncSuite.scala
@@ -459,4 +459,124 @@ class ColumnarBuiltInFuncSuite extends ColumnarSparkPlanTest{
       )
     )
   }
+
+  // upper
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper with normal") {
+    val res = spark.sql("select upper(char_normal) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row("CHAR1 R"),
+        Row("CHAR2 "),
+        Row("CHAR3 "),
+        Row("CHAR4 ")
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper with null") {
+    val res = spark.sql("select upper(char_null) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row(null),
+        Row("CHAR200 "),
+        Row("CHAR300 "),
+        Row(null)
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper with space/empty string") {
+    val res = spark.sql("select upper(varchar_empty) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row(" VARCHAR100 "),
+        Row(""),
+        Row("VARCHAR300"),
+        Row("VARCHAR400")
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper-upper") {
+    val res = spark.sql("select upper(char_null), upper(varchar_null) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row(null, null),
+        Row("CHAR200 ", "VARCHAR2"),
+        Row("CHAR300 ", "VARCHAR3"),
+        Row(null, "VARCHAR4")
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper(upper())") {
+    val res = spark.sql("select upper(upper(char_null)) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row(null),
+        Row("CHAR200 "),
+        Row("CHAR300 "),
+        Row(null)
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper with subQuery") {
+    val res = spark.sql("select upper(l) from (select upper(char_normal) as l from builtin_table)")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row("CHAR1 R"),
+        Row("CHAR2 "),
+        Row("CHAR3 "),
+        Row("CHAR4 ")
+      )
+    )
+  }
+
+  test("Test ColumnarProjectExec happen and result is same as native " +
+    "when execute upper with ch") {
+    val res = spark.sql("select upper(ch_col) from builtin_table")
+    val executedPlan = res.queryExecution.executedPlan
+    assert(executedPlan.find(_.isInstanceOf[ColumnarProjectExec]).isDefined, s"ColumnarProjectExec not happened, executedPlan as follows: \n$executedPlan")
+    assert(executedPlan.find(_.isInstanceOf[ProjectExec]).isEmpty, s"ProjectExec happened, executedPlan as follows: \n$executedPlan")
+    checkAnswer(
+      res,
+      Seq(
+        Row("中文1"),
+        Row("中文2"),
+        Row("中文3"),
+        Row("中文4")
+      )
+    )
+  }
 }
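
For reviewers, a minimal standalone sketch (not part of the patch) of the two behaviors introduced above, assuming only Spark's org.apache.spark.sql.types on the classpath: isUnsupportedCast mirrors the updated unsupportedCastCheck condition (cast(string as date) no longer throws), and upperJsonSketch mirrors the format string added for Upper, with returnType and childJson standing in for sparkTypeToOmniExpJsonType(...) and the rewritten child expression. The object and method names here are illustrative only.

import org.apache.spark.sql.types._

object UpperCastSketch {
  private def isDecimalOrStringType(dataType: DataType): Boolean =
    dataType.isInstanceOf[DecimalType] || dataType.isInstanceOf[StringType]

  // Mirrors the new unsupportedCastCheck condition: true means the cast is
  // still rejected by the columnar adaptor.
  def isUnsupportedCast(target: DataType, child: DataType): Boolean =
    (target.isInstanceOf[StringType] && !isDecimalOrStringType(child)) ||
      (!isDecimalOrStringType(target) && !target.isInstanceOf[DateType] &&
        child.isInstanceOf[StringType])

  // Mirrors the JSON format string added for Upper; the two parameters are
  // placeholders for values produced elsewhere in OmniExpressionAdaptor.
  def upperJsonSketch(returnType: String, childJson: String): String =
    "{\"exprType\":\"FUNCTION\",\"returnType\":%s,\"function_name\":\"upper\", \"arguments\":[%s]}"
      .format(returnType, childJson)

  def main(args: Array[String]): Unit = {
    assert(!isUnsupportedCast(DateType, StringType))   // cast(string as date): now allowed
    assert(isUnsupportedCast(IntegerType, StringType)) // cast(string as int): still unsupported
    assert(isUnsupportedCast(StringType, IntegerType)) // cast(int as string): still unsupported
    println(upperJsonSketch("<returnType>", "<childJsonExpr>"))
  }
}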