Support predicate pushdown for DATETIME (#1216)
isha97 authored and davidrabinowitz committed May 7, 2024
1 parent 02fd850 commit 718efd6
Showing 3 changed files with 27 additions and 0 deletions.
2 changes: 2 additions & 0 deletions CHANGES.md
@@ -2,6 +2,8 @@

## Next

* Issue #1215: Support predicate pushdown for DATETIME

## 0.36.1 - 2024-01-31

* PR #1176: fix timestamp filter translation issue
6 changes: 6 additions & 0 deletions SparkFilterUtils.java
@@ -23,6 +23,8 @@
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Map;
import java.util.Optional;
@@ -332,6 +334,10 @@ static String compileValue(Object value, char arrayStart, char arrayEnd) {
// Instant uses ISO-8601 representation.
return "TIMESTAMP '" + instant.toString() + "'";
}
if (value instanceof LocalDateTime) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");
return "DATETIME '" + ((LocalDateTime) value).format(formatter) + "'";
}
if (value instanceof Object[]) {
return Arrays.stream((Object[]) value)
.map(SparkFilterUtils::compileValue)
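For reference, a minimal standalone sketch of what the new branch produces. DateTimeLiteralSketch and compileLocalDateTime are hypothetical names, not part of the connector; the formatting logic simply mirrors the lines added above, rendering a LocalDateTime filter value as a BigQuery DATETIME literal with microsecond precision.

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class DateTimeLiteralSketch {
  // Mirrors the added branch in compileValue.
  static String compileLocalDateTime(LocalDateTime value) {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");
    return "DATETIME '" + value.format(formatter) + "'";
  }

  public static void main(String[] args) {
    // Prints: DATETIME '2023-10-25 10:00:00.000000'
    System.out.println(compileLocalDateTime(LocalDateTime.of(2023, 10, 25, 10, 0)));
  }
}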
@@ -618,4 +618,23 @@ public void testReadFilteredTimestampField() {
assertThat(head.get(head.fieldIndex("eventTime")))
.isEqualTo(Timestamp.valueOf("2023-01-09 02:00:00"));
}

@Test
public void testPushDateTimePredicate() {
IntegrationTestUtils.runQuery(
String.format(
"CREATE TABLE `%s.%s` (%s INTEGER, %s DATETIME) "
+ "AS SELECT * FROM UNNEST([(1, DATETIME '2023-09-25 1:00:00'), "
+ "(2, DATETIME '2023-09-29 10:00:00'), (3, DATETIME '2023-10-30 17:30:00')])",
testDataset, testTable, "orderId", "orderDateTime"));
Dataset<Row> df =
spark
.read()
.format("bigquery")
.option("dataset", testDataset.toString())
.option("table", testTable)
.load()
.where("orderDateTime < '2023-10-25 10:00:00'");
assertThat(df.count()).isEqualTo(2);
}
}
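With this change, a filter such as orderDateTime < '2023-10-25 10:00:00' on a DATETIME column can be pushed down to BigQuery rather than evaluated in Spark; the compiled predicate would look roughly like orderDateTime < DATETIME '2023-10-25 10:00:00.000000' (illustrative, assuming the string literal resolves to a LocalDateTime filter value).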
