Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
*/
package org.apache.gluten.functions

import org.apache.gluten.execution.ProjectExecTransformer
import org.apache.gluten.execution.{BatchScanExecTransformer, ProjectExecTransformer}

import org.apache.spark.sql.execution.ProjectExec
import org.apache.spark.sql.types.Decimal
Expand Down Expand Up @@ -218,6 +218,59 @@ class DateFunctionsValidateSuite extends FunctionsValidateSuite {
}
}

test("make_timestamp_ntz") {
  withTempPath { path =>
    // Cover a normal timestamp, minimal boundary values, and a null seconds column.
    Seq(
      (2017, 7, 11, 6, 30, Decimal(45678000, 18, 6)),
      (1, 1, 1, 1, 1, Decimal(1, 18, 6)),
      (1, 1, 1, 1, 1, null))
      .toDF("year", "month", "day", "hour", "min", "sec")
      .write
      .parquet(path.getCanonicalPath)

    spark.read
      .parquet(path.getCanonicalPath)
      .createOrReplaceTempView("make_timestamp_ntz_tbl")

    // Results must match vanilla Spark and the scan must be offloaded to Gluten.
    runQueryAndCompare(
      "select make_timestamp_ntz(year, month, day, hour, min, sec) " +
        "from make_timestamp_ntz_tbl") {
      checkGlutenPlan[BatchScanExecTransformer]
    }
  }
}

test("try_make_timestamp_ntz") {
  // try_make_timestamp_ntz is not registered on every Spark version this suite
  // runs against; probe the catalog and silently skip when it is absent.
  val functionExists = spark.catalog
    .listFunctions()
    .collect()
    .exists(f => f.name.equalsIgnoreCase("try_make_timestamp_ntz"))

  if (functionExists) {
    withTempPath { path =>
      // Normal value, boundary values, and a null seconds column.
      Seq(
        (2017, 7, 11, 6, 30, Decimal(45678000, 18, 6)),
        (1, 1, 1, 1, 1, Decimal(1, 18, 6)),
        (1, 1, 1, 1, 1, null))
        .toDF("year", "month", "day", "hour", "min", "sec")
        .write
        .parquet(path.getCanonicalPath)

      spark.read
        .parquet(path.getCanonicalPath)
        .createOrReplaceTempView("try_make_timestamp_ntz_tbl")

      // Results must match vanilla Spark and the scan must be offloaded.
      runQueryAndCompare(
        "select try_make_timestamp_ntz(year, month, day, hour, min, sec) from " +
          "try_make_timestamp_ntz_tbl") {
        checkGlutenPlan[BatchScanExecTransformer]
      }
    }
  }
}

test("make_ym_interval") {
runQueryAndCompare("select make_ym_interval(1, 1)") {
checkGlutenPlan[ProjectExecTransformer]
Expand Down Expand Up @@ -326,6 +379,21 @@ class DateFunctionsValidateSuite extends FunctionsValidateSuite {
}
}

test("to_timestamp_ntz") {
  withTempPath { path =>
    // Three distinct wall-clock strings, including two one second apart.
    Seq("2015-07-22 10:00:00", "2014-12-31 23:59:59", "2014-12-31 23:59:58")
      .toDF("t")
      .write
      .parquet(path.getCanonicalPath)

    spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("time")

    // Parsing with an explicit pattern must be offloaded and match Spark.
    runQueryAndCompare("select to_timestamp_ntz(t, 'yyyy-MM-dd HH:mm:ss') from time") {
      checkGlutenPlan[BatchScanExecTransformer]
    }
  }
}

test("to_utc_timestamp") {
withTempPath {
path =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1580,6 +1580,65 @@ abstract class ScalarFunctionsValidateSuite extends FunctionsValidateSuite {
}
}

testWithMinSparkVersion("make_timestamp_ntz with validation enabled", "3.4") {
  // When NTZ validation is enabled, make_timestamp_ntz is expected to fall
  // back to vanilla Spark (exactly one fallback operator in the plan).
  val validationConf =
    "spark.gluten.sql.columnar.backend.velox.enableTimestampNtzValidation" -> "true"
  withSQLConf(validationConf) {
    val result = spark.sql(
      "SELECT l_orderkey, make_timestamp_ntz(2024, 5, 22, 10, 30, 0.0) FROM lineitem LIMIT 1")
    checkFallbackOperators(result, 1)
    result.collect()
  }
}

testWithMinSparkVersion("make_timestamp_ntz with validation disabled", "3.4") {
  // With NTZ validation disabled, make_timestamp_ntz should be offloaded:
  // the project is transformed and no operator falls back to vanilla Spark.
  withSQLConf("spark.gluten.sql.columnar.backend.velox.enableTimestampNtzValidation" -> "false") {
    val df = spark.sql(
      "SELECT l_orderkey, make_timestamp_ntz(2024, 5, 22, 10, 30, 0.0) FROM lineitem LIMIT 1")
    // Removed the unused `optimizedPlan` local: it rendered the optimized plan
    // to a string that was never read, doing needless work in the test.
    checkGlutenPlan[ProjectExecTransformer](df)
    checkFallbackOperators(df, 0)
    df.collect()
  }
}

testWithMinSparkVersion("try_make_timestamp_ntz with validation enabled", "3.4") {
  // With NTZ validation enabled, try_make_timestamp_ntz should fall back to
  // vanilla Spark (exactly one fallback operator in the plan).
  val validationConf =
    "spark.gluten.sql.columnar.backend.velox.enableTimestampNtzValidation" -> "true"
  withSQLConf(validationConf) {
    withTempView("try_make_timestamp_ntz_tbl") {
      // Two valid rows plus one with a null seconds value.
      val rows = Seq(
        (2017, 7, 11, 6, 30, 0.0),
        (2024, 5, 22, 10, 30, 0.0),
        (1, 1, 1, 1, 1, null.asInstanceOf[java.lang.Double]))
      rows
        .toDF("year", "month", "day", "hour", "min", "sec")
        .createOrReplaceTempView("try_make_timestamp_ntz_tbl")

      val df = spark.sql(
        "SELECT try_make_timestamp_ntz(year, month, day, hour, min, sec) " +
          "FROM try_make_timestamp_ntz_tbl")
      checkFallbackOperators(df, 1)
      df.collect()
    }
  }
}

testWithMinSparkVersion("try_make_timestamp_ntz with validation disabled", "3.4") {
  // With NTZ validation disabled, try_make_timestamp_ntz should be offloaded:
  // the project is transformed and nothing falls back to vanilla Spark.
  val validationConf =
    "spark.gluten.sql.columnar.backend.velox.enableTimestampNtzValidation" -> "false"
  withSQLConf(validationConf) {
    withTempView("try_make_timestamp_ntz_tbl") {
      // Two valid rows plus one with a null seconds value.
      val rows = Seq(
        (2017, 7, 11, 6, 30, 0.0),
        (2024, 5, 22, 10, 30, 0.0),
        (1, 1, 1, 1, 1, null.asInstanceOf[java.lang.Double]))
      rows
        .toDF("year", "month", "day", "hour", "min", "sec")
        .createOrReplaceTempView("try_make_timestamp_ntz_tbl")

      val df = spark.sql(
        "SELECT try_make_timestamp_ntz(year, month, day, hour, min, sec) " +
          "FROM try_make_timestamp_ntz_tbl")
      checkGlutenPlan[ProjectExecTransformer](df)
      checkFallbackOperators(df, 0)
      df.collect()
    }
  }
}

testWithMinSparkVersion("localtimestamp with validation enabled", "3.4") {
// With validation enabled (default), localtimestamp should fallback to Spark
// because it returns TimestampNTZType
Expand Down
Loading